├── .coafile
├── .coveragerc
├── .deepsource.toml
├── .drone.yml
├── .editorconfig
├── .github
└── workflows
│ ├── docs.yml
│ └── tests.yml
├── .gitignore
├── CHANGELOG.md
├── COPYING
├── MANIFEST.in
├── Makefile
├── README.rst
├── aio_pika
├── __init__.py
├── abc.py
├── channel.py
├── connection.py
├── exceptions.py
├── exchange.py
├── log.py
├── message.py
├── patterns
│ ├── __init__.py
│ ├── base.py
│ ├── master.py
│ └── rpc.py
├── pool.py
├── py.typed
├── queue.py
├── robust_channel.py
├── robust_connection.py
├── robust_exchange.py
├── robust_queue.py
├── tools.py
└── transaction.py
├── docs
├── Makefile
├── requirements.txt
└── source
│ ├── _static
│ ├── .DS_Store
│ ├── custom.css
│ ├── favicon
│ │ ├── android-icon-144x144.png
│ │ ├── android-icon-192x192.png
│ │ ├── android-icon-36x36.png
│ │ ├── android-icon-48x48.png
│ │ ├── android-icon-72x72.png
│ │ ├── android-icon-96x96.png
│ │ ├── apple-icon-114x114.png
│ │ ├── apple-icon-120x120.png
│ │ ├── apple-icon-144x144.png
│ │ ├── apple-icon-152x152.png
│ │ ├── apple-icon-180x180.png
│ │ ├── apple-icon-57x57.png
│ │ ├── apple-icon-60x60.png
│ │ ├── apple-icon-72x72.png
│ │ ├── apple-icon-76x76.png
│ │ ├── apple-icon-precomposed.png
│ │ ├── apple-icon.png
│ │ ├── browserconfig.xml
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ ├── favicon-96x96.png
│ │ ├── favicon.ico
│ │ ├── manifest.json
│ │ ├── ms-icon-144x144.png
│ │ ├── ms-icon-150x150.png
│ │ ├── ms-icon-310x310.png
│ │ └── ms-icon-70x70.png
│ ├── icon.png
│ ├── logo.png
│ ├── logo2x.png
│ └── tutorial
│ │ ├── bindings.svg
│ │ ├── consumer.svg
│ │ ├── direct-exchange-multiple.svg
│ │ ├── direct-exchange.svg
│ │ ├── exchanges.svg
│ │ ├── prefetch-count.svg
│ │ ├── producer.svg
│ │ ├── python-five.svg
│ │ ├── python-four.svg
│ │ ├── python-one-overall.svg
│ │ ├── python-six.svg
│ │ ├── python-three-overall.svg
│ │ ├── python-two.svg
│ │ ├── queue.svg
│ │ ├── receiving.svg
│ │ └── sending.svg
│ ├── _templates
│ └── base.html
│ ├── apidoc.rst
│ ├── conf.py
│ ├── examples
│ ├── benchmark.py
│ ├── extend-patterns.py
│ ├── external-credentials.py
│ ├── log-level-set.py
│ ├── main.py
│ ├── master.py
│ ├── pooling.py
│ ├── rpc-callee.py
│ ├── rpc-caller.py
│ ├── simple_async_consumer.py
│ ├── simple_consumer.py
│ ├── simple_publisher.py
│ ├── simple_publisher_transactions.py
│ ├── tornado-pubsub.py
│ └── worker.py
│ ├── index.rst
│ ├── patterns.rst
│ ├── quick-start.rst
│ └── rabbitmq-tutorial
│ ├── 1-introduction.rst
│ ├── 2-work-queues.rst
│ ├── 3-publish-subscribe.rst
│ ├── 4-routing.rst
│ ├── 5-topics.rst
│ ├── 6-rpc.rst
│ ├── 7-publisher-confirms.rst
│ ├── examples
│ ├── 1-introduction
│ │ ├── receive.py
│ │ └── send.py
│ ├── 2-work-queues
│ │ ├── new_task.py
│ │ ├── new_task_initial.py
│ │ └── tasks_worker.py
│ ├── 3-publish-subscribe
│ │ ├── emit_log.py
│ │ └── receive_logs.py
│ ├── 4-routing
│ │ ├── emit_log_direct.py
│ │ ├── receive_logs_direct.py
│ │ └── receive_logs_direct_simple.py
│ ├── 5-topics
│ │ ├── emit_log_topic.py
│ │ └── receive_logs_topic.py
│ ├── 6-rpc
│ │ ├── rpc_client.py
│ │ └── rpc_server.py
│ └── 7-publisher-confirms
│ │ ├── publish_asynchronously.py
│ │ ├── publish_batches.py
│ │ └── publish_individually.py
│ └── index.rst
├── gray.conf
├── logo.svg
├── noxfile.py
├── poetry.lock
├── poetry.toml
├── pylama.ini
├── pyproject.toml
└── tests
├── __init__.py
├── conftest.py
├── test_amqp.py
├── test_amqp_robust.py
├── test_amqp_robust_proxy.py
├── test_amqps.py
├── test_connect.py
├── test_connection_params.py
├── test_master.py
├── test_memory_leak.py
├── test_message.py
├── test_pool.py
├── test_rpc.py
├── test_tools.py
└── test_types.py
/.coafile:
--------------------------------------------------------------------------------
1 | [Default]
2 | bears = PEP8Bear, PyUnusedCodeBear, FilenameBear, InvalidLinkBear
3 | files = aio_pika/**/*.py
4 | max_line_length = 120
5 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit = aio_pika/compat.py
3 | branch = True
4 |
5 | [report]
6 | exclude_lines =
7 | pragma: no cover
8 | raise NotImplementedError
9 |
--------------------------------------------------------------------------------
/.deepsource.toml:
--------------------------------------------------------------------------------
1 | version = 1
2 |
3 | [[analyzers]]
4 | name = "python"
5 | enabled = true
6 | runtime_version = "3.x.x"
7 |
--------------------------------------------------------------------------------
/.drone.yml:
--------------------------------------------------------------------------------
1 | ---
2 | kind: pipeline
3 | name: default
4 |
5 |
6 | steps:
7 | - name: prepare toxenv
8 | image: snakepacker/python:all
9 | group: tests
10 | pull: always
11 | commands:
12 | - tox --notest
13 | volumes:
14 | - name: cache
15 | path: /drone/src/.tox
16 |
17 | - name: linter
18 | image: snakepacker/python:all
19 | commands:
20 | - tox
21 | environment:
22 | TOXENV: lint
23 | volumes:
24 | - name: cache
25 | path: /drone/src/.tox
26 |
27 | - name: mypy
28 | image: snakepacker/python:all
29 | group: tests
30 | pull: always
31 | commands:
32 | - tox
33 | environment:
34 | TOXENV: mypy
35 | volumes:
36 | - name: cache
37 | path: /drone/src/.tox
38 |
39 | - name: checkdoc
40 | image: snakepacker/python:all
41 | group: tests
42 | pull: always
43 | commands:
44 | - tox
45 | environment:
46 | TOXENV: checkdoc
47 | volumes:
48 | - name: cache
49 | path: /drone/src/.tox
50 |
51 | - name: python 3.8
52 | image: snakepacker/python:all
53 | commands:
54 | - tox
55 | environment:
56 | AMQP_URL: amqp://guest:guest@rabbitmq
57 | TOXENV: py38
58 | COVERALLS_REPO_TOKEN:
59 | from_secret: COVERALLS_TOKEN
60 | volumes:
61 | - name: cache
62 | path: /drone/src/.tox
63 |
64 | - name: python 3.8 uvloop
65 | image: snakepacker/python:all
66 | commands:
67 | - tox
68 | environment:
69 | AMQP_URL: amqp://guest:guest@rabbitmq
70 | TOXENV: py38-uvloop
71 | COVERALLS_REPO_TOKEN:
72 | from_secret: COVERALLS_TOKEN
73 | volumes:
74 | - name: cache
75 | path: /drone/src/.tox
76 |
77 | - name: python 3.7
78 | image: snakepacker/python:all
79 | commands:
80 | - tox
81 | environment:
82 | AMQP_URL: amqp://guest:guest@rabbitmq
83 | TOXENV: py37
84 | COVERALLS_REPO_TOKEN:
85 | from_secret: COVERALLS_TOKEN
86 | volumes:
87 | - name: cache
88 | path: /drone/src/.tox
89 |
90 | - name: python 3.7 uvloop
91 | image: snakepacker/python:all
92 | commands:
93 | - tox
94 | environment:
95 | AMQP_URL: amqp://guest:guest@rabbitmq
96 | TOXENV: py37-uvloop
97 | COVERALLS_REPO_TOKEN:
98 | from_secret: COVERALLS_TOKEN
99 | volumes:
100 | - name: cache
101 | path: /drone/src/.tox
102 |
103 | - name: python 3.6
104 | image: snakepacker/python:all
105 | commands:
106 | - tox
107 | environment:
108 | AMQP_URL: amqp://guest:guest@rabbitmq
109 | TOXENV: py36
110 | COVERALLS_REPO_TOKEN:
111 | from_secret: COVERALLS_TOKEN
112 | volumes:
113 | - name: cache
114 | path: /drone/src/.tox
115 |
116 | - name: python 3.6 uvloop
117 | image: snakepacker/python:all
118 | commands:
119 | - tox
120 | environment:
121 | AMQP_URL: amqp://guest:guest@rabbitmq
122 | TOXENV: py36-uvloop
123 | COVERALLS_REPO_TOKEN:
124 | from_secret: COVERALLS_TOKEN
125 | volumes:
126 | - name: cache
127 | path: /drone/src/.tox
128 |
129 | - name: python 3.5
130 | image: snakepacker/python:all
131 | commands:
132 | - tox
133 | environment:
134 | AMQP_URL: amqp://guest:guest@rabbitmq
135 | TOXENV: py35
136 | COVERALLS_REPO_TOKEN:
137 | from_secret: COVERALLS_TOKEN
138 | volumes:
139 | - name: cache
140 | path: /drone/src/.tox
141 |
142 | - name: python 3.5 uvloop
143 | image: snakepacker/python:all
144 | commands:
145 | - tox
146 | environment:
147 | AMQP_URL: amqp://guest:guest@rabbitmq
148 | TOXENV: py35-uvloop
149 | COVERALLS_REPO_TOKEN:
150 | from_secret: COVERALLS_TOKEN
151 | volumes:
152 | - name: cache
153 | path: /drone/src/.tox
154 |
155 | - name: notify
156 | image: drillster/drone-email
157 | settings:
158 | host:
159 | from_secret: SMTP_HOST
160 | username:
161 | from_secret: SMTP_USERNAME
162 | password:
163 | from_secret: SMTP_PASSWORD
164 | from:
165 | from_secret: SMTP_USERNAME
166 | when:
167 | status:
168 | - changed
169 | - failure
170 |
171 | volumes:
172 | - name: cache
173 | temp: {}
174 |
175 | services:
176 | - name: rabbitmq
177 | image: rabbitmq:3-alpine
178 | ---
179 | kind: signature
180 | hmac: 32a7f019710b16f795a6531ef6fab89d2ab24f50aaee729c3a7379a0dda472b0
181 |
182 | ...
183 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | end_of_line = lf
5 | insert_final_newline = true
6 | charset = utf-8
7 | trim_trailing_whitespace = true
8 |
9 | [*.{py,yml}]
10 | indent_style = space
11 | max_line_length = 79
12 |
13 | [*.py]
14 | indent_size = 4
15 |
16 | [*.rst]
17 | indent_size = 3
18 |
19 | [Makefile]
20 | indent_style = tab
21 |
22 | [*.yml]
23 | indent_size = 2
24 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Documentation
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 |
8 | jobs:
9 | build-and-deploy:
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@v2
14 | - name: Setup Python 3.12
15 | uses: actions/setup-python@v2
16 | with:
17 | python-version: "3.12"
18 | - name: Cache virtualenv
19 | id: venv-cache
20 | uses: actions/cache@v3
21 | with:
22 | path: .venv
23 | key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}-3.12
24 | restore-keys: |
25 | venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}-
26 | venv-${{ runner.os }}-${{ github.job }}-
27 | venv-${{ runner.os }}-
28 | - name: Install Poetry
29 | run: python -m pip install poetry
30 | - name: Cache Poetry and pip
31 | uses: actions/cache@v3
32 | with:
33 | path: |
34 | ~/.cache/pypoetry
35 | ~/.cache/pip
36 | key: poetry-pip-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
37 | restore-keys: |
38 | poetry-pip-${{ runner.os }}-
39 | - name: Install Dependencies with Poetry
40 | run: poetry install --no-interaction --no-ansi
41 | - name: Build Documentation
42 | run: |
43 | cd docs
44 | poetry -C .. run make html
45 | - name: Install AWS CLI
46 | run: |
47 | sudo apt update
48 | sudo apt install -y awscli
49 | - name: Configure AWS CLI for Cloudflare R2
50 | run: |
51 | aws configure set aws_access_key_id ${{ secrets.CF_R2_ACCESS_KEY_ID }}
52 | aws configure set aws_secret_access_key ${{ secrets.CF_R2_SECRET_ACCESS_KEY }}
53 | aws configure set default.region us-east-1 # R2 uses us-east-1 by default
54 | aws configure set default.output json
55 | - name: Sync to Cloudflare R2
56 | env:
57 | CF_R2_ENDPOINT: ${{ secrets.CF_R2_ENDPOINT }}
58 | CF_R2_BUCKET_NAME: ${{ secrets.CF_R2_BUCKET_NAME }}
59 | run: |
60 | aws s3 sync docs/build/html s3://$CF_R2_BUCKET_NAME \
61 | --delete \
62 | --acl public-read \
63 | --endpoint-url $CF_R2_ENDPOINT
64 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: tests
2 |
3 |
4 | on:
5 | push:
6 | branches: [ master ]
7 | pull_request:
8 | branches: [ master ]
9 |
10 |
11 | jobs:
12 | pylama:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v2
16 | - name: Setup python3.10
17 | uses: actions/setup-python@v2
18 | with:
19 | python-version: "3.10"
20 | - name: Cache virtualenv
21 | id: venv-cache
22 | uses: actions/cache@v3
23 | with:
24 | path: .venv
25 | key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}
26 | - run: python -m pip install poetry
27 | - run: poetry install
28 | - run: poetry run pylama
29 | env:
30 | FORCE_COLOR: 1
31 | mypy:
32 | runs-on: ubuntu-latest
33 | steps:
34 | - uses: actions/checkout@v2
35 | - name: Setup python3.10
36 | uses: actions/setup-python@v2
37 | with:
38 | python-version: "3.10"
39 | - name: Cache virtualenv
40 | id: venv-cache
41 | uses: actions/cache@v3
42 | with:
43 | path: .venv
44 | key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}
45 | - run: python -m pip install poetry
46 | - run: poetry install
47 | - run: poetry run mypy
48 | env:
49 | FORCE_COLOR: 1
50 |
51 | tests:
52 | runs-on: ubuntu-latest
53 |
54 | strategy:
55 | fail-fast: false
56 |
57 | matrix:
58 | python:
59 | - '3.9'
60 | - '3.10'
61 | - '3.11'
62 | - '3.12'
63 | steps:
64 | - uses: actions/checkout@v2
65 | - name: Setup python${{ matrix.python }}
66 | uses: actions/setup-python@v2
67 | with:
68 | python-version: "${{ matrix.python }}"
69 | - name: Cache virtualenv
70 | id: venv-cache
71 | uses: actions/cache@v3
72 | with:
73 | path: .venv
74 | key: venv-${{ runner.os }}-${{ github.job }}-${{ github.ref }}-${{ matrix.python }}
75 | - run: python -m pip install poetry
76 | - run: poetry install --with=uvloop
77 | - name: pytest
 78 |         run: |
79 | poetry run pytest \
80 | -vv \
81 | --cov=aio_pika \
82 | --cov-report=term-missing \
83 | --doctest-modules \
84 | --aiomisc-test-timeout=120 \
85 | tests
86 | env:
87 | FORCE_COLOR: 1
88 | - run: poetry run coveralls
89 | env:
90 | COVERALLS_PARALLEL: 'true'
91 | COVERALLS_SERVICE_NAME: github
92 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
93 |
94 | finish:
95 | needs:
96 | - tests
97 | runs-on: ubuntu-latest
98 | steps:
99 | - name: Coveralls Finished
100 | uses: coverallsapp/github-action@master
101 | with:
102 | github-token: ${{ secrets.github_token }}
103 | parallel-finished: true
104 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### VirtualEnv template
3 | # Virtualenv
4 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
5 | .Python
6 | [Bb]in
7 | [Ii]nclude
8 | [Ll]ib
9 | [Ll]ib64
10 | [Ll]ocal
11 | [Ss]cripts
12 | pyvenv.cfg
13 | .venv
14 | pip-selfcheck.json
15 | ### IPythonNotebook template
16 | # Temporary data
17 | .ipynb_checkpoints/
18 | ### Python template
19 | # Byte-compiled / optimized / DLL files
20 | __pycache__/
21 | *.py[cod]
22 | *$py.class
23 |
24 | # C extensions
25 | *.so
26 |
27 | # Distribution / packaging
28 | .Python
29 | env/
30 | build/
31 | develop-eggs/
32 | dist/
33 | downloads/
34 | eggs/
35 | .eggs/
36 | lib/
37 | lib64/
38 | parts/
39 | sdist/
40 | var/
41 | *.egg-info/
42 | .installed.cfg
43 | *.egg
44 |
45 | # PyInstaller
46 | # Usually these files are written by a python script from a template
47 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
48 | *.manifest
49 | *.spec
50 |
51 | # Installer logs
52 | pip-log.txt
53 | pip-delete-this-directory.txt
54 |
55 | # Unit test / coverage reports
56 | htmlcov/
57 | .tox/
58 | .coverage
59 | .coverage.*
60 | .cache
61 | nosetests.xml
62 | coverage.xml
63 | *,cover
64 | .hypothesis/
65 |
66 | # Translations
67 | *.mo
68 | *.pot
69 |
70 | # Django stuff:
71 | *.log
72 | local_settings.py
73 |
74 | # Flask stuff:
75 | instance/
76 | .webassets-cache
77 |
78 | # Scrapy stuff:
79 | .scrapy
80 |
81 | # Sphinx documentation
82 | docs/_build/
83 | docs/source/apidoc
84 |
85 | # PyBuilder
86 | target/
87 |
88 | # IPython Notebook
89 | .ipynb_checkpoints
90 |
91 | # pyenv
92 | .python-version
93 |
94 | # pytest
95 | .pytest_cache
96 |
97 | # celery beat schedule file
98 | celerybeat-schedule
99 |
100 | # dotenv
101 | .env
102 |
103 | # virtualenv
104 | venv/
105 | ENV/
106 |
107 | # Spyder project settings
108 | .spyderproject
109 |
110 | # Rope project settings
111 | .ropeproject
112 | ### JetBrains template
113 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
114 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
115 |
116 | # User-specific stuff:
117 | .idea/
118 | .vscode/
119 |
120 | ## File-based project format:
121 | *.iws
122 |
123 | ## Plugin-specific files:
124 |
125 | # IntelliJ
126 | /out/
127 |
128 | # mpeltonen/sbt-idea plugin
129 | .idea_modules/
130 |
131 | # JIRA plugin
132 | atlassian-ide-plugin.xml
133 |
134 | # Crashlytics plugin (for Android Studio and IntelliJ)
135 | com_crashlytics_export_strings.xml
136 | crashlytics.properties
137 | crashlytics-build.properties
138 | fabric.properties
139 |
140 | /htmlcov
141 | /temp
142 | .DS_Store
143 | .*cache
144 | .nox
145 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-exclude tests *
2 | recursive-exclude __pycache__ *
3 | exclude .*
4 |
5 | include README.rst
6 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
all: test

RABBITMQ_IMAGE:=mosquito/aiormq-rabbitmq

test:
	find . -name "*.pyc" -type f -delete
	tox

rabbitmq:
	# "$$(...)" escapes "$" from make so the SHELL performs the command
	# substitution; a single "$(docker ps ...)" would be expanded by make
	# itself (to an empty string), making the kill a no-op.
	docker kill $$(docker ps -f label=aio-pika.rabbitmq -q) || true
	docker pull $(RABBITMQ_IMAGE)
	docker run --rm -d \
		-l aio-pika.rabbitmq \
		-p 5671:5671 \
		-p 5672:5672 \
		-p 15671:15671 \
		-p 15672:15672 \
		$(RABBITMQ_IMAGE)

upload:
	python3.7 setup.py sdist bdist_wheel
	twine upload dist/*$(shell python3 setup.py --version)*
23 |
--------------------------------------------------------------------------------
/aio_pika/__init__.py:
--------------------------------------------------------------------------------
"""aio-pika public API.

Re-exports the most commonly used classes and functions so that a plain
``import aio_pika`` exposes the whole high-level interface.
"""

from importlib.metadata import version

from . import abc, patterns, pool
from .abc import DeliveryMode
from .channel import Channel
from .connection import Connection, connect
from .exceptions import AMQPException, MessageProcessError
from .exchange import Exchange, ExchangeType
from .log import logger
from .message import IncomingMessage, Message
from .queue import Queue
from .robust_channel import RobustChannel
from .robust_connection import RobustConnection, connect_robust
from .robust_exchange import RobustExchange
from .robust_queue import RobustQueue


# Resolve the installed distribution version once at import time.
# ``importlib.metadata.version()`` is the documented high-level API and is
# equivalent to ``Distribution.from_name("aio-pika").version``.
__version__ = version("aio-pika")


__all__ = (
    "AMQPException",
    "Channel",
    "Connection",
    "DeliveryMode",
    "Exchange",
    "ExchangeType",
    "IncomingMessage",
    "Message",
    "MessageProcessError",
    "Queue",
    "RobustChannel",
    "RobustConnection",
    "RobustExchange",
    "RobustQueue",
    "__version__",
    "abc",
    "connect",
    "connect_robust",
    "logger",
    "patterns",
    "pool",
)
43 |
--------------------------------------------------------------------------------
/aio_pika/exceptions.py:
--------------------------------------------------------------------------------
"""Exception types for aio-pika.

Most names are re-exported from :mod:`aiormq.exceptions` so that users only
ever need to import from ``aio_pika.exceptions``.
"""

import asyncio

import pamqp.exceptions
from aiormq.exceptions import (
    AMQPChannelError, AMQPConnectionError, AMQPError, AMQPException,
    AuthenticationError, ChannelClosed, ChannelInvalidStateError,
    ChannelNotFoundEntity, ChannelPreconditionFailed, ConnectionClosed,
    DeliveryError, DuplicateConsumerTag, IncompatibleProtocolError,
    InvalidFrameError, MethodNotImplemented, ProbableAuthenticationError,
    ProtocolSyntaxError, PublishError,
)


# Exceptions that signal the underlying connection/transport is broken.
# NOTE(review): the consumers of this tuple live outside this module —
# presumably the reconnect logic of the "robust" connection; verify there.
CONNECTION_EXCEPTIONS = (
    AMQPError,
    ConnectionError,
    OSError,
    RuntimeError,
    StopAsyncIteration,
    pamqp.exceptions.PAMQPException,
)


class MessageProcessError(AMQPError):
    """Error raised while processing a message.

    ``reason`` is a printf-style template: "<description>: <message!r>".
    """
    reason = "%s: %r"


class QueueEmpty(AMQPError, asyncio.QueueEmpty):
    """Queue has no messages.

    Inherits :class:`asyncio.QueueEmpty` as well so callers may catch
    either hierarchy. NOTE(review): raising site is outside this module.
    """
    pass


__all__ = (
    "AMQPChannelError",
    "AMQPConnectionError",
    "AMQPError",
    "AMQPException",
    "AuthenticationError",
    "CONNECTION_EXCEPTIONS",
    "ChannelClosed",
    "ChannelInvalidStateError",
    "ChannelNotFoundEntity",
    "ChannelPreconditionFailed",
    "ConnectionClosed",
    "DeliveryError",
    "DuplicateConsumerTag",
    "IncompatibleProtocolError",
    "InvalidFrameError",
    "MessageProcessError",
    "MethodNotImplemented",
    "ProbableAuthenticationError",
    "ProtocolSyntaxError",
    "PublishError",
    "QueueEmpty",
)
55 |
--------------------------------------------------------------------------------
/aio_pika/exchange.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, Union
2 |
3 | import aiormq
4 | from pamqp.common import Arguments
5 |
6 | from .abc import (
7 | AbstractChannel, AbstractExchange, AbstractMessage, ExchangeParamType,
8 | ExchangeType, TimeoutType, get_exchange_name,
9 | )
10 | from .log import get_logger
11 |
12 |
13 | log = get_logger(__name__)
14 |
15 |
class Exchange(AbstractExchange):
    """ Exchange abstraction.

    Wraps a broker exchange and performs all AMQP operations through the
    underlay aiormq channel of :attr:`channel`.
    """
    channel: AbstractChannel

    def __init__(
        self,
        channel: AbstractChannel,
        name: str,
        type: Union[ExchangeType, str] = ExchangeType.DIRECT,
        *,
        auto_delete: bool = False,
        durable: bool = False,
        internal: bool = False,
        passive: bool = False,
        arguments: Arguments = None,
    ):
        # Normalize the exchange type to its wire-level string form,
        # whether given as an ExchangeType member or a plain string.
        self._type = type.value if isinstance(type, ExchangeType) else type
        self.channel = channel
        self.name = name
        self.auto_delete = auto_delete
        self.durable = durable
        self.internal = internal
        self.passive = passive
        # Never keep None around; an empty dict is a valid arguments table.
        self.arguments = arguments or {}

    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__}({self}):"
            f" auto_delete={self.auto_delete},"
            f" durable={self.durable},"
            f" arguments={self.arguments!r})>"
        )

    async def declare(
        self, timeout: TimeoutType = None,
    ) -> aiormq.spec.Exchange.DeclareOk:
        """ Declare this exchange on the broker with the flags given
        to ``__init__``.

        :param timeout: execution timeout
        """
        channel = await self.channel.get_underlay_channel()
        return await channel.exchange_declare(
            self.name,
            exchange_type=self._type,
            durable=self.durable,
            auto_delete=self.auto_delete,
            internal=self.internal,
            passive=self.passive,
            arguments=self.arguments,
            timeout=timeout,
        )

    async def bind(
        self,
        exchange: ExchangeParamType,
        routing_key: str = "",
        *,
        arguments: Arguments = None,
        timeout: TimeoutType = None,
    ) -> aiormq.spec.Exchange.BindOk:

        """ A binding can also be a relationship between two exchanges.
        This can be simply read as: this exchange is interested in messages
        from another exchange.

        Bindings can take an extra routing_key parameter. To avoid the confusion
        with a basic_publish parameter we're going to call it a binding key.

        .. code-block:: python

            client = await connect()

            routing_key = 'simple_routing_key'
            src_exchange_name = "source_exchange"
            dest_exchange_name = "destination_exchange"

            channel = await client.channel()
            src_exchange = await channel.declare_exchange(
                src_exchange_name, auto_delete=True
            )
            dest_exchange = await channel.declare_exchange(
                dest_exchange_name, auto_delete=True
            )
            queue = await channel.declare_queue(auto_delete=True)

            await queue.bind(dest_exchange, routing_key)
            await dest_exchange.bind(src_exchange, routing_key)

        :param exchange: :class:`aio_pika.exchange.Exchange` instance
        :param routing_key: routing key
        :param arguments: additional arguments
        :param timeout: execution timeout
        :return: :class:`None`
        """

        log.debug(
            "Binding exchange %r to exchange %r, routing_key=%r, arguments=%r",
            self,
            exchange,
            routing_key,
            arguments,
        )

        channel = await self.channel.get_underlay_channel()
        # This exchange is the *destination*; the other one is the source.
        return await channel.exchange_bind(
            arguments=arguments,
            destination=self.name,
            routing_key=routing_key,
            source=get_exchange_name(exchange),
            timeout=timeout,
        )

    async def unbind(
        self,
        exchange: ExchangeParamType,
        routing_key: str = "",
        arguments: Arguments = None,
        timeout: TimeoutType = None,
    ) -> aiormq.spec.Exchange.UnbindOk:

        """ Remove exchange-to-exchange binding for this
        :class:`Exchange` instance

        :param exchange: :class:`aio_pika.exchange.Exchange` instance
        :param routing_key: routing key
        :param arguments: additional arguments
        :param timeout: execution timeout
        :return: :class:`None`
        """

        log.debug(
            "Unbinding exchange %r from exchange %r, "
            "routing_key=%r, arguments=%r",
            self,
            exchange,
            routing_key,
            arguments,
        )

        channel = await self.channel.get_underlay_channel()
        return await channel.exchange_unbind(
            arguments=arguments,
            destination=self.name,
            routing_key=routing_key,
            source=get_exchange_name(exchange),
            timeout=timeout,
        )

    async def publish(
        self,
        message: AbstractMessage,
        routing_key: str,
        *,
        mandatory: bool = True,
        immediate: bool = False,
        timeout: TimeoutType = None,
    ) -> Optional[aiormq.abc.ConfirmationFrameType]:

        """ Publish the message to the queue. `aio-pika` uses
        `publisher confirms`_ extension for message delivery.

        .. _publisher confirms: https://www.rabbitmq.com/confirms.html

        :param message: the message to publish
        :param routing_key: message routing key
        :param mandatory: ask the broker to return unroutable messages
        :param immediate: AMQP ``immediate`` flag
        :param timeout: execution timeout
        :raises ValueError: when publishing to an internal exchange
        """

        log.debug(
            "Publishing message with routing key %r via exchange %r: %r",
            routing_key,
            self,
            message,
        )

        if self.internal:
            # Caught on the client side to prevent channel closure
            raise ValueError(
                f"Can not publish to internal exchange: '{self.name}'!",
            )

        if self.channel.is_closed:
            # Fail fast instead of letting aiormq operate on a dead channel.
            raise aiormq.exceptions.ChannelInvalidStateError(
                "%r closed" % self.channel,
            )

        channel = await self.channel.get_underlay_channel()
        return await channel.basic_publish(
            exchange=self.name,
            routing_key=routing_key,
            body=message.body,
            properties=message.properties,
            mandatory=mandatory,
            immediate=immediate,
            timeout=timeout,
        )

    async def delete(
        self, if_unused: bool = False, timeout: TimeoutType = None,
    ) -> aiormq.spec.Exchange.DeleteOk:

        """ Delete the queue

        :param timeout: operation timeout
        :param if_unused: perform deletion when queue has no bindings.
        """

        log.info("Deleting %r", self)
        channel = await self.channel.get_underlay_channel()
        result = await channel.exchange_delete(
            self.name, if_unused=if_unused, timeout=timeout,
        )
        # Drop the channel reference: a deleted exchange must not be reused,
        # so any further operation fails fast with AttributeError.
        del self.channel
        return result
226 |
227 |
228 | __all__ = ("Exchange", "ExchangeType", "ExchangeParamType")
229 |
--------------------------------------------------------------------------------
/aio_pika/log.py:
--------------------------------------------------------------------------------
"""Logging helpers for the ``aio_pika`` package."""

import logging


# Root logger of the package; every module logger is a child of it.
logger: logging.Logger = logging.getLogger("aio_pika")


def get_logger(name: str) -> logging.Logger:
    """Return a child of the package :data:`logger` for *name*.

    A leading ``"aio_pika."`` prefix is stripped so that the result is
    named ``aio_pika.<module>`` instead of ``aio_pika.aio_pika.<module>``.

    :param name: usually a module's ``__name__``; names without a dot
        are accepted as well (the previous ``name.split(".", 1)``
        unpacking raised :class:`ValueError` for them).
    """
    if name == logger.name:
        # The package itself asked for a logger — no child needed.
        return logger
    # str.partition never raises, unlike two-element split unpacking.
    package, _, module = name.partition(".")
    if package == logger.name:
        name = module
    return logger.getChild(name)
12 |
--------------------------------------------------------------------------------
/aio_pika/patterns/__init__.py:
--------------------------------------------------------------------------------
"""Messaging patterns built on top of aio-pika.

Currently exposes the Master/Worker pattern and the RPC helpers.
"""

from .master import JsonMaster, Master, NackMessage, RejectMessage, Worker
from .rpc import RPC, JsonRPC


# Public API of the patterns package.
__all__ = (
    "Master",
    "NackMessage",
    "RejectMessage",
    "RPC",
    "Worker",
    "JsonMaster",
    "JsonRPC",
)
14 |
--------------------------------------------------------------------------------
/aio_pika/patterns/base.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | from typing import Any, Awaitable, Callable, TypeVar
3 |
4 |
5 | T = TypeVar("T")
6 | CallbackType = Callable[..., Awaitable[T]]
7 |
8 |
class Method:
    """A lazily-built dotted method name bound to a dispatch function.

    Attribute access extends the dotted path (``Method("a").b.c`` has the
    name ``"a.b.c"``); calling the object forwards the accumulated name
    and the keyword arguments to ``func``.
    """

    __slots__ = ("name", "func")

    def __init__(self, name: str, func: Callable[..., Any]):
        self.name = name
        self.func = func

    def __getattr__(self, item: str) -> "Method":
        # Each attribute lookup yields a new Method with the path extended.
        return Method(f"{self.name}.{item}", func=self.func)

    def __call__(self, **kwargs: Any) -> Any:
        return self.func(self.name, kwargs=kwargs)
24 |
25 |
class Proxy:
    """Entry point for dotted-name dispatch.

    Any attribute access produces a :class:`Method` rooted at that
    attribute name and bound to ``func``.
    """

    __slots__ = ("func",)

    def __init__(self, func: Callable[..., Any]):
        self.func = func

    def __getattr__(self, item: str) -> Method:
        return Method(item, self.func)
34 |
35 |
class Base:
    """Common (de)serialization mixin for the pattern classes.

    ``pickle`` is the default wire format; subclasses switch formats by
    overriding ``SERIALIZER`` (any module with ``dumps``/``loads``) and
    ``CONTENT_TYPE``.
    """

    __slots__ = ()

    SERIALIZER = pickle
    CONTENT_TYPE = "application/python-pickle"

    def serialize(self, data: Any) -> bytes:
        """Serialize *data* to bytes via ``SERIALIZER.dumps``.

        Override this method to change the serializer.

        :param data: Data which will be serialized
        """
        return self.SERIALIZER.dumps(data)

    def deserialize(self, data: bytes) -> Any:
        """Inverse of :meth:`serialize`: restore an object from bytes.

        Override this method to change the serializer.

        :param data: Data which will be deserialized
        """
        return self.SERIALIZER.loads(data)
59 |
--------------------------------------------------------------------------------
/aio_pika/patterns/master.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import gzip
3 | import json
4 | import logging
5 | from functools import partial
6 | from types import MappingProxyType
7 | from typing import Any, Awaitable, Mapping, Optional
8 |
9 | import aiormq
10 |
11 | from aio_pika.abc import (
12 | AbstractChannel, AbstractExchange, AbstractIncomingMessage, AbstractQueue,
13 | ConsumerTag, DeliveryMode,
14 | )
15 | from aio_pika.message import Message
16 |
17 | from ..tools import create_task, ensure_awaitable
18 | from .base import Base, CallbackType, Proxy, T
19 |
20 |
21 | log = logging.getLogger(__name__)
22 |
23 |
class MessageProcessingError(Exception):
    """Base exception for message-processing failures in the
    Master/Worker pattern."""
26 |
27 |
class NackMessage(MessageProcessingError):
    """Raise inside a worker callback to negatively acknowledge the
    current message. NOTE(review): the handling code lives in Master's
    consumer loop, outside this block — verify there.

    :param requeue: ask the broker to redeliver the message
    """

    def __init__(self, requeue: bool = False):
        self.requeue: bool = requeue
31 |
32 |
class RejectMessage(MessageProcessingError):
    """Raise inside a worker callback to reject the current message.
    NOTE(review): the handling code lives in Master's consumer loop,
    outside this block — verify there.

    :param requeue: ask the broker to redeliver the message
    """

    def __init__(self, requeue: bool = False):
        self.requeue: bool = requeue
36 |
37 |
class Worker:
    """Handle for a running consumer created by the Master pattern.

    Stores the queue, the consumer tag and the event loop it was
    subscribed with, and knows how to cancel the subscription.
    """

    __slots__ = ("queue", "consumer_tag", "loop")

    def __init__(
        self,
        queue: AbstractQueue,
        consumer_tag: ConsumerTag,
        loop: asyncio.AbstractEventLoop,
    ):
        self.queue = queue
        self.consumer_tag = consumer_tag
        self.loop = loop

    def close(self) -> Awaitable[None]:
        """ Cancel subscription to the channel

        :return: :class:`asyncio.Task`
        """

        async def closer() -> None:
            await self.queue.cancel(self.consumer_tag)

        return create_task(closer)
63 |
64 |
class Master(Base):
    __slots__ = (
        "channel",
        "loop",
        "proxy",
        "_requeue",
        "_reject_on_redelivered",
    )

    # Task messages are persisted by the broker so they survive restarts
    DELIVERY_MODE = DeliveryMode.PERSISTENT

    __doc__ = """
    Implements Master/Worker pattern.
    Usage example:

    `worker.py` ::

        master = Master(channel)
        worker = await master.create_worker('test_worker', lambda x: print(x))

    `master.py` ::

        master = Master(channel)
        await master.proxy.test_worker('foo')
    """

    def __init__(
        self,
        channel: AbstractChannel,
        requeue: bool = True,
        reject_on_redelivered: bool = False,
    ):
        """ Creates a new :class:`Master` instance.

        :param channel: Initialized instance of :class:`aio_pika.Channel`
        :param requeue: requeue messages when the processing callback fails
        :param reject_on_redelivered: reject instead of requeue when a
            failed message was already delivered before
        """
        self.channel: AbstractChannel = channel
        self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop()
        self.proxy = Proxy(self.create_task)

        # Log tasks which the broker could not route to any queue
        self.channel.return_callbacks.add(self.on_message_returned)

        self._requeue = requeue
        self._reject_on_redelivered = reject_on_redelivered

    @property
    def exchange(self) -> AbstractExchange:
        # Tasks are published through the default exchange, where the
        # routing key is the destination queue name.
        return self.channel.default_exchange

    @staticmethod
    def on_message_returned(
        channel: Optional[AbstractChannel],
        message: AbstractIncomingMessage,
    ) -> None:
        log.warning(
            "Message returned. Probably destination queue does not exists: %r",
            message,
        )

    def serialize(self, data: Any) -> bytes:
        """ Serialize data to the bytes.
        Uses `pickle` by default.
        You should overlap this method when you want to change serializer

        :param data: Data which will be serialized
        :returns: bytes
        """
        return super().serialize(data)

    def deserialize(self, data: bytes) -> Any:
        """ Deserialize data from bytes.
        Uses `pickle` by default.
        You should overlap this method when you want to change serializer

        :param data: Data which will be deserialized
        :returns: :class:`Any`
        """
        return super().deserialize(data)

    @classmethod
    async def execute(
        cls, func: CallbackType, kwargs: Any,
    ) -> T:
        """ Call the worker callback with the deserialized task kwargs.

        :raises RejectMessage: when the payload is not a mapping of
            keyword arguments
        """
        kwargs = kwargs or {}

        if not isinstance(kwargs, dict):
            # Use the module-level logger, not the root logger, so this
            # message follows the package's logging configuration.
            log.error("Bad kwargs %r received for the %r", kwargs, func)
            raise RejectMessage(requeue=False)

        return await func(**kwargs)

    async def on_message(
        self, func: CallbackType,
        message: AbstractIncomingMessage,
    ) -> None:
        """ Consumer callback: deserialize the task body and run ``func``.

        Ack/nack/reject behaviour follows the ``requeue`` and
        ``reject_on_redelivered`` options passed to the constructor.
        """
        async with message.process(
            requeue=self._requeue,
            reject_on_redelivered=self._reject_on_redelivered,
            ignore_processed=True,
        ):
            try:
                await self.execute(func, self.deserialize(message.body))
            except RejectMessage as e:
                await message.reject(requeue=e.requeue)
            except NackMessage as e:
                await message.nack(requeue=e.requeue)

    async def create_queue(
        self, queue_name: str, **kwargs: Any,
    ) -> AbstractQueue:
        """ Declare the queue a worker will consume from. """
        return await self.channel.declare_queue(queue_name, **kwargs)

    async def create_worker(
        self, queue_name: str,
        func: CallbackType,
        **kwargs: Any,
    ) -> Worker:
        """ Creates a new :class:`Worker` instance. """
        queue = await self.create_queue(queue_name, **kwargs)
        consumer_tag = await queue.consume(
            partial(self.on_message, ensure_awaitable(func)),
        )

        return Worker(queue, consumer_tag, self.loop)

    async def create_task(
        self, channel_name: str,
        kwargs: Mapping[str, Any] = MappingProxyType({}),
        **message_kwargs: Any,
    ) -> Optional[aiormq.abc.ConfirmationFrameType]:

        """ Creates a new task for the worker

        :param channel_name: name of the destination queue
        :param kwargs: keyword arguments for the worker callback; the
            default is an immutable mapping, safe to share between calls
        :param message_kwargs: extra :class:`Message` constructor arguments
        """
        message = Message(
            body=self.serialize(kwargs),
            content_type=self.CONTENT_TYPE,
            delivery_mode=self.DELIVERY_MODE,
            **message_kwargs,
        )

        # mandatory=True: unroutable tasks come back via on_message_returned
        return await self.exchange.publish(
            message, channel_name, mandatory=True,
        )
207 |
208 |
class JsonMaster(Master):
    """ :class:`Master` flavour which serializes task payloads as JSON. """

    SERIALIZER = json
    CONTENT_TYPE = "application/json"

    def serialize(self, data: Any) -> bytes:
        payload = self.SERIALIZER.dumps(data, ensure_ascii=False)
        return payload.encode()
215 |
216 |
class CompressedJsonMaster(Master):
    """ :class:`Master` flavour using gzip-compressed JSON payloads. """

    SERIALIZER = json
    CONTENT_TYPE = "application/json;compression=gzip"
    COMPRESS_LEVEL = 6

    def serialize(self, data: Any) -> bytes:
        raw = self.SERIALIZER.dumps(data, ensure_ascii=False).encode()
        return gzip.compress(raw, compresslevel=self.COMPRESS_LEVEL)

    def deserialize(self, data: bytes) -> Any:
        return self.SERIALIZER.loads(gzip.decompress(data))
230 |
--------------------------------------------------------------------------------
/aio_pika/pool.py:
--------------------------------------------------------------------------------
1 | import abc
2 | import asyncio
3 | from types import TracebackType
4 | from typing import (
5 | Any, AsyncContextManager, Awaitable, Callable, Generic, Optional, Set,
6 | Tuple, Type, TypeVar,
7 | )
8 |
9 | from aio_pika.log import get_logger
10 | from aio_pika.tools import create_task
11 |
12 |
13 | log = get_logger(__name__)
14 |
15 |
class PoolInstance(abc.ABC):
    """ Interface for items which a :class:`Pool` knows how to dispose of. """

    @abc.abstractmethod
    def close(self) -> Awaitable[None]:
        """ Release the underlying resource.

        :return: awaitable which completes once the item is closed
        """
        raise NotImplementedError
20 |
21 |
# Type of the items stored in a Pool
T = TypeVar("T")
# Factory callable which, when awaited, produces a new pool item
ConstructorType = Callable[
    ...,
    Awaitable[PoolInstance],
]
27 |
28 |
class PoolInvalidStateError(RuntimeError):
    """ Raised on any operation against a pool which is already closed. """
    pass
31 |
32 |
class Pool(Generic[T]):
    """ Generic pool of lazily-created items (e.g. connections, channels).

    Items are created on demand via ``constructor`` — up to ``max_size``
    of them — and recycled through :meth:`acquire` / :meth:`put`.
    """

    __slots__ = (
        "loop",
        "__max_size",
        "__items",
        "__constructor",
        "__created",
        "__lock",
        "__constructor_args",
        "__item_set",
        "__closed",
    )

    def __init__(
        self,
        constructor: ConstructorType,
        *args: Any,
        max_size: Optional[int] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ):
        """
        :param constructor: callable returning an awaitable which produces
            a new pool item
        :param args: positional arguments passed to ``constructor``
        :param max_size: maximum number of items; ``None`` means unbounded
        :param loop: event loop to use (defaults to the current one)
        """
        self.loop = loop or asyncio.get_event_loop()
        self.__closed = False
        self.__constructor: Callable[..., Awaitable[Any]] = constructor
        self.__constructor_args: Tuple[Any, ...] = args or ()
        self.__created: int = 0
        self.__item_set: Set[PoolInstance] = set()
        self.__items: asyncio.Queue = asyncio.Queue()
        self.__lock: asyncio.Lock = asyncio.Lock()
        self.__max_size: Optional[int] = max_size

    @property
    def is_closed(self) -> bool:
        """ ``True`` once :meth:`close` has been called. """
        return self.__closed

    def acquire(self) -> "PoolItemContextManager[T]":
        """ Return an async context manager which checks out an item. """
        if self.__closed:
            raise PoolInvalidStateError("acquire operation on closed pool")

        return PoolItemContextManager[T](self)

    @property
    def _has_released(self) -> bool:
        # True when at least one idle item is waiting in the queue
        return self.__items.qsize() > 0

    @property
    def _is_overflow(self) -> bool:
        # Creating another item is not allowed when the pool is at capacity
        # or an idle item is already available for reuse.
        if self.__max_size:
            return self.__created >= self.__max_size or self._has_released
        return self._has_released

    async def _create_item(self) -> T:
        if self.__closed:
            raise PoolInvalidStateError("create item operation on closed pool")

        # The lock serializes creation so concurrent callers cannot
        # overshoot max_size; re-check overflow after acquiring it.
        async with self.__lock:
            if self._is_overflow:
                return await self.__items.get()

            log.debug("Creating a new instance of %r", self.__constructor)
            item = await self.__constructor(*self.__constructor_args)
            self.__created += 1
            self.__item_set.add(item)
            return item

    async def _get(self) -> T:
        if self.__closed:
            raise PoolInvalidStateError("get operation on closed pool")

        # Prefer reusing an idle item over creating a new one
        if self._is_overflow:
            return await self.__items.get()

        return await self._create_item()

    def put(self, item: T) -> None:
        """ Return an item to the pool for reuse. """
        if self.__closed:
            raise PoolInvalidStateError("put operation on closed pool")

        self.__items.put_nowait(item)

    async def close(self) -> None:
        """ Mark the pool closed and close every item ever created. """
        async with self.__lock:
            self.__closed = True
            tasks = []

            for item in self.__item_set:
                tasks.append(create_task(item.close))

            if tasks:
                # Best effort: a failing close() must not mask the others
                await asyncio.gather(*tasks, return_exceptions=True)

    async def __aenter__(self) -> "Pool":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        if self.__closed:
            return

        # Awaiting the coroutine directly; wrapping it in ensure_future
        # only added a redundant task around an awaited coroutine.
        await self.close()
136 |
137 |
class PoolItemContextManager(Generic[T], AsyncContextManager):
    """ Async context manager returned by :meth:`Pool.acquire`.

    Checks an item out of the pool on enter and returns it on exit.
    """

    __slots__ = "pool", "item"

    def __init__(self, pool: "Pool"):
        self.pool = pool
        # Initialize to None so __aexit__ is safe even when __aenter__
        # never ran or failed before an item was acquired; the original
        # bare annotation left the attribute unset (AttributeError).
        self.item: Optional[T] = None

    async def __aenter__(self) -> T:
        # noinspection PyProtectedMember
        self.item = await self.pool._get()
        return self.item

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Only hand the item back when one was actually acquired
        if self.item is not None:
            self.pool.put(self.item)
158 |
--------------------------------------------------------------------------------
/aio_pika/py.typed:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/aio_pika/robust_exchange.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import warnings
3 | from typing import Any, Dict, Union
4 |
5 | import aiormq
6 | from pamqp.common import Arguments
7 |
8 | from .abc import (
9 | AbstractChannel, AbstractExchange, AbstractRobustExchange,
10 | ExchangeParamType, TimeoutType,
11 | )
12 | from .exchange import Exchange, ExchangeType
13 | from .log import get_logger
14 |
15 |
16 | log = get_logger(__name__)
17 |
18 |
class RobustExchange(Exchange, AbstractRobustExchange):
    """ Exchange which re-declares itself and its bindings on reconnect """

    # Bindings to re-apply on restore: exchange -> bind() kwargs
    _bindings: Dict[Union[AbstractExchange, str], Dict[str, Any]]

    def __init__(
        self,
        channel: AbstractChannel,
        name: str,
        type: Union[ExchangeType, str] = ExchangeType.DIRECT,
        *,
        auto_delete: bool = False,
        durable: bool = False,
        internal: bool = False,
        passive: bool = False,
        arguments: Arguments = None,
    ):
        super().__init__(
            channel=channel,
            name=name,
            type=type,
            auto_delete=auto_delete,
            durable=durable,
            internal=internal,
            passive=passive,
            arguments=arguments,
        )

        self._bindings = {}
        self.__restore_lock = asyncio.Lock()

    async def restore(self, channel: Any = None) -> None:
        """ Re-declare the exchange and re-apply its recorded bindings.

        :param channel: deprecated and ignored
        """
        if channel is not None:
            warnings.warn(
                "Channel argument will be ignored because you "
                "don't need to pass this anymore.",
                DeprecationWarning,
            )
        # The lock prevents concurrent restore attempts from interleaving.
        # (The former ``try/except Exception: raise`` around this body was
        # a no-op and has been removed.)
        async with self.__restore_lock:
            # special case for default exchange
            if self.name == "":
                return

            await self.declare()

            for exchange, kwargs in tuple(self._bindings.items()):
                await self.bind(exchange, **kwargs)

    async def bind(
        self,
        exchange: ExchangeParamType,
        routing_key: str = "",
        *,
        arguments: Arguments = None,
        timeout: TimeoutType = None,
        robust: bool = True,
    ) -> aiormq.spec.Exchange.BindOk:
        """ Bind this exchange to another; with ``robust=True`` the binding
        is recorded and re-applied by :meth:`restore`. """
        result = await super().bind(
            exchange,
            routing_key=routing_key,
            arguments=arguments,
            timeout=timeout,
        )

        if robust:
            self._bindings[exchange] = dict(
                routing_key=routing_key,
                arguments=arguments,
            )

        return result

    async def unbind(
        self,
        exchange: ExchangeParamType,
        routing_key: str = "",
        arguments: Arguments = None,
        timeout: TimeoutType = None,
    ) -> aiormq.spec.Exchange.UnbindOk:
        """ Unbind the exchange and forget the recorded binding. """
        result = await super().unbind(
            exchange, routing_key, arguments=arguments, timeout=timeout,
        )
        self._bindings.pop(exchange, None)
        return result
106 |
107 |
108 | __all__ = ("RobustExchange",)
109 |
--------------------------------------------------------------------------------
/aio_pika/robust_queue.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | import warnings
3 | from typing import Any, Awaitable, Callable, Dict, Optional, Tuple, Union
4 |
5 | import aiormq
6 | from aiormq import ChannelInvalidStateError
7 | from pamqp.common import Arguments
8 |
9 | from .abc import (
10 | AbstractChannel, AbstractExchange, AbstractIncomingMessage,
11 | AbstractQueueIterator, AbstractRobustQueue, ConsumerTag, TimeoutType,
12 | )
13 | from .exchange import ExchangeParamType
14 | from .log import get_logger
15 | from .queue import Queue, QueueIterator
16 |
17 |
18 | log = get_logger(__name__)
19 |
20 |
class RobustQueue(Queue, AbstractRobustQueue):
    """ Queue which re-declares itself and restores its bindings and
    consumers after a reconnect. """

    __slots__ = ("_consumers", "_bindings")

    # Consumers to restore after reconnect: consumer_tag -> consume() kwargs
    _consumers: Dict[ConsumerTag, Dict[str, Any]]
    # Bindings to restore: (exchange, routing_key) -> bind() kwargs
    _bindings: Dict[Tuple[Union[AbstractExchange, str], str], Dict[str, Any]]

    def __init__(
        self,
        channel: AbstractChannel,
        name: Optional[str],
        durable: bool = False,
        exclusive: bool = False,
        auto_delete: bool = False,
        arguments: Arguments = None,
        passive: bool = False,
    ):

        super().__init__(
            channel=channel,
            # A stable name is needed so the queue can be re-declared after
            # reconnect; generate one when the caller did not supply any.
            name=name or f"amq_{uuid.uuid4().hex}",
            durable=durable,
            exclusive=exclusive,
            auto_delete=auto_delete,
            arguments=arguments,
            passive=passive,
        )

        self._consumers = {}
        self._bindings = {}

    async def restore(self, channel: Any = None) -> None:
        """ Re-declare the queue and re-apply recorded bindings/consumers.

        :param channel: deprecated and ignored
        """
        if channel is not None:
            warnings.warn(
                "Channel argument will be ignored because you "
                "don't need to pass this anymore.",
                DeprecationWarning,
            )

        await self.declare()
        # Snapshot first: bind()/consume() below mutate these dicts
        bindings = tuple(self._bindings.items())
        consumers = tuple(self._consumers.items())

        for (exchange, routing_key), kwargs in bindings:
            await self.bind(exchange, routing_key, **kwargs)

        for consumer_tag, kwargs in consumers:
            await self.consume(consumer_tag=consumer_tag, **kwargs)

    async def bind(
        self,
        exchange: ExchangeParamType,
        routing_key: Optional[str] = None,
        *,
        arguments: Arguments = None,
        timeout: TimeoutType = None,
        robust: bool = True,
    ) -> aiormq.spec.Queue.BindOk:
        """ Bind the queue; with ``robust=True`` the binding is recorded
        and re-applied by :meth:`restore`. """
        if routing_key is None:
            routing_key = self.name

        result = await super().bind(
            exchange=exchange, routing_key=routing_key,
            arguments=arguments, timeout=timeout,
        )

        if robust:
            self._bindings[(exchange, routing_key)] = dict(
                arguments=arguments,
            )

        return result

    async def unbind(
        self,
        exchange: ExchangeParamType,
        routing_key: Optional[str] = None,
        arguments: Arguments = None,
        timeout: TimeoutType = None,
    ) -> aiormq.spec.Queue.UnbindOk:
        """ Unbind the queue and forget the recorded binding. """
        if routing_key is None:
            routing_key = self.name

        result = await super().unbind(
            exchange, routing_key, arguments, timeout,
        )
        self._bindings.pop((exchange, routing_key), None)

        return result

    async def consume(
        self,
        callback: Callable[[AbstractIncomingMessage], Awaitable[Any]],
        no_ack: bool = False,
        exclusive: bool = False,
        arguments: Arguments = None,
        consumer_tag: Optional[ConsumerTag] = None,
        timeout: TimeoutType = None,
        robust: bool = True,
    ) -> ConsumerTag:
        """ Start a consumer; with ``robust=True`` it is recorded and
        re-created by :meth:`restore`. """
        consumer_tag = await super().consume(
            consumer_tag=consumer_tag,
            timeout=timeout,
            callback=callback,
            no_ack=no_ack,
            exclusive=exclusive,
            arguments=arguments,
        )

        if robust:
            self._consumers[consumer_tag] = dict(
                callback=callback,
                no_ack=no_ack,
                exclusive=exclusive,
                arguments=arguments,
            )

        return consumer_tag

    async def cancel(
        self,
        consumer_tag: ConsumerTag,
        timeout: TimeoutType = None,
        nowait: bool = False,
    ) -> aiormq.spec.Basic.CancelOk:
        """ Cancel a consumer and stop restoring it on reconnect. """
        result = await super().cancel(consumer_tag, timeout, nowait)
        self._consumers.pop(consumer_tag, None)
        return result

    def iterator(self, **kwargs: Any) -> AbstractQueueIterator:
        """ Return a reconnect-aware message iterator. """
        return RobustQueueIterator(self, **kwargs)
151 |
152 |
class RobustQueueIterator(QueueIterator):
    """ Queue iterator which keeps consuming across channel reconnects. """

    def __init__(self, queue: Queue, **kwargs: Any):
        super().__init__(queue, **kwargs)

        # Do not shut the iterator down when the channel closes: a robust
        # connection will re-establish it.
        self._amqp_queue.close_callbacks.discard(self._set_closed)

    async def consume(self) -> None:
        # An invalid channel state just means a reconnect is in progress;
        # wait for the underlying channel and retry.
        while True:
            try:
                result = await super().consume()
            except ChannelInvalidStateError:
                await self._amqp_queue.channel.get_underlay_channel()
                continue
            return result
165 |
166 |
167 | __all__ = ("RobustQueue",)
168 |
--------------------------------------------------------------------------------
/aio_pika/transaction.py:
--------------------------------------------------------------------------------
1 | from types import TracebackType
2 | from typing import Optional, Type
3 |
4 | import aiormq
5 | from pamqp import commands
6 |
7 | from .abc import (
8 | AbstractChannel, AbstractTransaction, TimeoutType, TransactionState,
9 | )
10 |
11 |
class Transaction(AbstractTransaction):
    """ AMQP transaction bound to a single channel.

    Usable as an async context manager: selected on enter, committed on a
    clean exit and rolled back when an exception escapes the body.
    """

    def __init__(self, channel: AbstractChannel):
        self.__channel = channel
        self.state: TransactionState = TransactionState.CREATED

    def __str__(self) -> str:
        return self.state.value

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.state.value}>"

    @property
    def channel(self) -> AbstractChannel:
        """ Underlying channel; raises when missing or already closed. """
        channel = self.__channel
        if channel is None:
            raise RuntimeError("Channel not opened")

        if channel.is_closed:
            raise RuntimeError("Closed channel")

        return channel

    async def select(
        self, timeout: TimeoutType = None,
    ) -> aiormq.spec.Tx.SelectOk:
        """ Begin the transaction (AMQP ``Tx.Select``). """
        underlay = await self.channel.get_underlay_channel()
        result = await underlay.tx_select(timeout=timeout)
        self.state = TransactionState.STARTED
        return result

    async def rollback(
        self, timeout: TimeoutType = None,
    ) -> commands.Tx.RollbackOk:
        """ Abort the transaction (AMQP ``Tx.Rollback``). """
        underlay = await self.channel.get_underlay_channel()
        result = await underlay.tx_rollback(timeout=timeout)
        self.state = TransactionState.ROLLED_BACK
        return result

    async def commit(
        self, timeout: TimeoutType = None,
    ) -> commands.Tx.CommitOk:
        """ Commit the transaction (AMQP ``Tx.Commit``). """
        underlay = await self.channel.get_underlay_channel()
        result = await underlay.tx_commit(timeout=timeout)
        self.state = TransactionState.COMMITED
        return result

    async def __aenter__(self) -> "Transaction":
        await self.select()
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Roll back when an exception escaped the body, commit otherwise
        if exc_type:
            await self.rollback()
        else:
            await self.commit()
72 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = aio-pika
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | # ONLY FOR ReadTheDocs
2 | autodoc
3 | furo
4 |
--------------------------------------------------------------------------------
/docs/source/_static/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/.DS_Store
--------------------------------------------------------------------------------
/docs/source/_static/custom.css:
--------------------------------------------------------------------------------
1 | @import url('https://fonts.googleapis.com/css?family=Roboto|Roboto+Condensed|Roboto+Mono|Roboto+Slab');
2 |
3 | h1.logo {
4 | text-align: center !important;
5 | }
6 |
--------------------------------------------------------------------------------
/docs/source/_static/favicon/android-icon-144x144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/android-icon-144x144.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/android-icon-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/android-icon-192x192.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/android-icon-36x36.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/android-icon-36x36.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/android-icon-48x48.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/android-icon-48x48.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/android-icon-72x72.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/android-icon-72x72.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/android-icon-96x96.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/android-icon-96x96.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-114x114.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-114x114.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-120x120.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-120x120.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-144x144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-144x144.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-152x152.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-152x152.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-180x180.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-180x180.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-57x57.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-57x57.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-60x60.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-60x60.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-72x72.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-72x72.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-76x76.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-76x76.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon-precomposed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon-precomposed.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/apple-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/apple-icon.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/browserconfig.xml:
--------------------------------------------------------------------------------
1 |
2 | #ffffff
--------------------------------------------------------------------------------
/docs/source/_static/favicon/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/favicon-16x16.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/favicon-32x32.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/favicon-96x96.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/favicon-96x96.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/favicon.ico
--------------------------------------------------------------------------------
/docs/source/_static/favicon/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "App",
3 | "icons": [
4 | {
5 | "src": "\/android-icon-36x36.png",
6 | "sizes": "36x36",
7 | "type": "image\/png",
8 | "density": "0.75"
9 | },
10 | {
11 | "src": "\/android-icon-48x48.png",
12 | "sizes": "48x48",
13 | "type": "image\/png",
14 | "density": "1.0"
15 | },
16 | {
17 | "src": "\/android-icon-72x72.png",
18 | "sizes": "72x72",
19 | "type": "image\/png",
20 | "density": "1.5"
21 | },
22 | {
23 | "src": "\/android-icon-96x96.png",
24 | "sizes": "96x96",
25 | "type": "image\/png",
26 | "density": "2.0"
27 | },
28 | {
29 | "src": "\/android-icon-144x144.png",
30 | "sizes": "144x144",
31 | "type": "image\/png",
32 | "density": "3.0"
33 | },
34 | {
35 | "src": "\/android-icon-192x192.png",
36 | "sizes": "192x192",
37 | "type": "image\/png",
38 | "density": "4.0"
39 | }
40 | ]
41 | }
--------------------------------------------------------------------------------
/docs/source/_static/favicon/ms-icon-144x144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/ms-icon-144x144.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/ms-icon-150x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/ms-icon-150x150.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/ms-icon-310x310.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/ms-icon-310x310.png
--------------------------------------------------------------------------------
/docs/source/_static/favicon/ms-icon-70x70.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/favicon/ms-icon-70x70.png
--------------------------------------------------------------------------------
/docs/source/_static/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/icon.png
--------------------------------------------------------------------------------
/docs/source/_static/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/logo.png
--------------------------------------------------------------------------------
/docs/source/_static/logo2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mosquito/aio-pika/c281314c6721e6a90389a8e170052ab36e5b3741/docs/source/_static/logo2x.png
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/bindings.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/consumer.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/exchanges.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/prefetch-count.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/producer.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/python-one-overall.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/python-two.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/queue.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/receiving.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_static/tutorial/sending.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/docs/source/_templates/base.html:
--------------------------------------------------------------------------------
1 | {% extends "!base.html" %}
2 | {% block extrahead %}
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 | {{ super() }}
21 | {% endblock %}
22 |
--------------------------------------------------------------------------------
/docs/source/apidoc.rst:
--------------------------------------------------------------------------------
1 | API Reference
2 | =============
3 |
4 | .. automodule:: aio_pika
5 | :members:
6 |
7 | .. autoclass:: aio_pika.patterns.base
8 | :members:
9 |
10 | .. autoclass:: aio_pika.patterns.Master
11 | :members:
12 |
13 | .. autoclass:: aio_pika.patterns.Worker
14 | :members:
15 |
16 | .. autoclass:: aio_pika.patterns.RPC
17 | :members:
18 |
--------------------------------------------------------------------------------
/docs/source/examples/benchmark.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import os
3 | import time
4 | from contextlib import contextmanager
5 | from typing import Generator, Any
6 |
7 | import aio_pika
8 | from aio_pika import connect_robust
9 |
10 |
@contextmanager
def timeit(message: str, iterations: int) -> Generator[Any, Any, Any]:
    """Context manager reporting wall-clock time for the enclosed block.

    Prints a start line immediately and, once the block finishes (even if
    it raises), the total elapsed time plus the average per iteration.
    """
    started_at = time.perf_counter()
    print(f"{message} started")
    try:
        yield
    finally:
        delay = time.perf_counter() - started_at
        print(
            f"{message} completed in {delay:.6f} seconds, "
            f"{iterations} iterations {delay / iterations:.6f} seconds "
            f"per iteration"
        )
24 |
25 |
async def main() -> None:
    """Benchmark publish/consume throughput with and without confirms.

    Runs four timed phases against transient queues: confirmed publishes,
    acked consumption, unconfirmed publishes, and no-ack consumption.
    """
    connect = await connect_robust(
        os.getenv("AMQP_URL", "amqp://guest:guest@localhost")
    )

    iterations = 100_000

    async with connect:
        # One message object reused for every publish.
        message = aio_pika.Message(b"test")
        incoming_message: aio_pika.abc.AbstractIncomingMessage

        # Phases 1-2: publisher confirms enabled (the default).
        async with connect.channel() as channel:
            queue = await channel.declare_queue(auto_delete=True)

            with timeit(
                "Sequential publisher confirms", iterations=iterations
            ):
                for _ in range(iterations):
                    await channel.default_exchange.publish(
                        message, routing_key=queue.name
                    )

            with timeit("Iterator consume no_ack=False", iterations=iterations):
                counter = 0
                async for incoming_message in queue.iterator(no_ack=False):
                    await incoming_message.ack()
                    counter += 1
                    if counter >= iterations:
                        break

        # Phases 3-4: confirms disabled, publishes are fire-and-forget.
        async with connect.channel(publisher_confirms=False) as channel:
            queue = await channel.declare_queue(auto_delete=True)

            with timeit(
                "Sequential no publisher confirms", iterations=iterations
            ):
                for _ in range(iterations):
                    await channel.default_exchange.publish(
                        message, routing_key=queue.name
                    )

            with timeit("Iterator consume no_ack=True", iterations=iterations):
                counter = 0
                async for _ in queue.iterator(no_ack=True):
                    counter += 1
                    if counter >= iterations:
                        break


if __name__ == "__main__":
    asyncio.run(main())
77 |
--------------------------------------------------------------------------------
/docs/source/examples/extend-patterns.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | import msgpack # type: ignore
4 |
5 | from aio_pika.patterns import RPC, Master
6 |
7 |
class MsgpackRPC(RPC):
    """RPC flavour that serializes payloads with msgpack.

    Overrides the base serialization hooks so call arguments and results
    travel as msgpack instead of the default encoding.
    """

    CONTENT_TYPE = "application/msgpack"

    def serialize(self, data: Any) -> bytes:
        """Pack *data* into a msgpack byte string."""
        return msgpack.dumps(data)

    # Fixed return annotation: msgpack.loads returns arbitrary Python
    # objects (dicts, lists, numbers, ...), not bytes.
    def deserialize(self, data: bytes) -> Any:
        """Unpack a msgpack byte string back into Python objects."""
        return msgpack.loads(data)
16 |
17 |
class MsgpackMaster(Master):
    """Master/Worker flavour that serializes task payloads with msgpack."""

    CONTENT_TYPE = "application/msgpack"

    def serialize(self, data: Any) -> bytes:
        """Pack *data* into a msgpack byte string."""
        return msgpack.dumps(data)

    # Fixed return annotation: msgpack.loads returns arbitrary Python
    # objects, not bytes.
    def deserialize(self, data: bytes) -> Any:
        """Unpack a msgpack byte string back into Python objects."""
        return msgpack.loads(data)
26 |
--------------------------------------------------------------------------------
/docs/source/examples/external-credentials.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import ssl
3 |
4 | import aio_pika
5 | from aio_pika.abc import SSLOptions
6 |
7 |
async def main() -> None:
    """Connect over TLS using certificate files and publish one message.

    The empty ``login`` presumably defers authentication to the client
    certificate (EXTERNAL mechanism) — TODO confirm against the broker
    configuration this example targets.
    """
    connection = await aio_pika.connect_robust(
        host="127.0.0.1",
        login="",
        ssl=True,
        ssl_options=SSLOptions(
            cafile="cacert.pem",
            certfile="cert.pem",
            keyfile="key.pem",
            # NOTE(review): passing ssl.CERT_REQUIRED to a field named
            # "no_verify_ssl" reads contradictory — verify against the
            # SSLOptions definition that this actually enables peer
            # certificate verification.
            no_verify_ssl=ssl.CERT_REQUIRED,
        ),
        client_properties={"connection_name": "aio-pika external credentials"},
    )

    async with connection:
        routing_key = "test_queue"

        channel = await connection.channel()

        # The default exchange routes by queue name, so this publishes
        # straight to the "test_queue" queue.
        await channel.default_exchange.publish(
            aio_pika.Message(body="Hello {}".format(routing_key).encode()),
            routing_key=routing_key,
        )


if __name__ == "__main__":
    asyncio.run(main())
35 |
--------------------------------------------------------------------------------
/docs/source/examples/log-level-set.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from aio_pika import logger
4 |
5 |
6 | logger.setLevel(logging.ERROR)
7 |
--------------------------------------------------------------------------------
/docs/source/examples/main.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import Optional
3 |
4 | from aio_pika import Message, connect_robust
5 | from aio_pika.abc import AbstractIncomingMessage
6 |
7 |
async def main() -> None:
    """Declare an exchange/queue pair, publish one message, then fetch it.

    Demonstrates single-message consumption via ``Queue.get`` with an
    explicit ack, followed by full cleanup of the declared objects.
    """
    connection = await connect_robust(
        "amqp://guest:guest@127.0.0.1/?name=aio-pika%20example",
    )

    queue_name = "test_queue"
    routing_key = "test_queue"

    # Open a channel, declare transient exchange and queue, and bind them.
    channel = await connection.channel()
    exchange = await channel.declare_exchange("direct", auto_delete=True)
    queue = await channel.declare_queue(queue_name, auto_delete=True)
    await queue.bind(exchange, routing_key)

    await exchange.publish(
        Message(
            b"Hello",
            content_type="text/plain",
            headers={"foo": "bar"},
        ),
        routing_key,
    )

    # Fetch a single message; fail=False returns None instead of raising
    # when the queue is empty.
    received: Optional[AbstractIncomingMessage] = await queue.get(
        timeout=5, fail=False
    )
    if received is None:
        print("Queue empty")
    else:
        # Acknowledge receipt so the broker drops the message.
        await received.ack()

    # Tear everything down.
    await queue.unbind(exchange, routing_key)
    await queue.delete()
    await connection.close()


if __name__ == "__main__":
    asyncio.run(main())
54 |
--------------------------------------------------------------------------------
/docs/source/examples/master.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import connect_robust
4 | from aio_pika.patterns import Master
5 |
6 |
async def main() -> None:
    """Publish 2000 tasks for Master-pattern workers, two equivalent ways."""
    connection = await connect_robust(
        "amqp://guest:guest@127.0.0.1/?name=aio-pika%20master",
    )

    async with connection:
        channel = await connection.channel()
        master = Master(channel)

        # Way 1: attribute access on the proxy object names the task.
        for task_id in range(1000):
            await master.proxy.my_task_name(task_id=task_id)

        # Way 2: name the task explicitly via create_task().
        for task_id in range(1000):
            await master.create_task(
                "my_task_name", kwargs={"task_id": task_id},
            )


if __name__ == "__main__":
    asyncio.run(main())
31 |
--------------------------------------------------------------------------------
/docs/source/examples/pooling.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import aio_pika
4 | from aio_pika.abc import AbstractRobustConnection
5 | from aio_pika.pool import Pool
6 |
7 |
async def main() -> None:
    """Demonstrate connection and channel pools shared by many tasks.

    A small pool of robust connections feeds a larger pool of channels;
    fifty concurrent publishers and one consumer borrow channels from it.
    """
    async def get_connection() -> AbstractRobustConnection:
        # Factory for the connection pool (at most 2 live connections).
        return await aio_pika.connect_robust("amqp://guest:guest@localhost/")

    connection_pool: Pool = Pool(get_connection, max_size=2)

    async def get_channel() -> aio_pika.Channel:
        # Factory for the channel pool: borrow a connection just long
        # enough to open a new channel on it.
        async with connection_pool.acquire() as connection:
            return await connection.channel()

    channel_pool: Pool = Pool(get_channel, max_size=10)
    queue_name = "pool_queue"

    async def consume() -> None:
        async with channel_pool.acquire() as channel:  # type: aio_pika.Channel
            # Limit to 10 unacknowledged messages in flight.
            await channel.set_qos(10)

            queue = await channel.declare_queue(
                queue_name, durable=False, auto_delete=False,
            )

            async with queue.iterator() as queue_iter:
                async for message in queue_iter:
                    print(message)
                    await message.ack()

    async def publish() -> None:
        async with channel_pool.acquire() as channel:  # type: aio_pika.Channel
            await channel.default_exchange.publish(
                aio_pika.Message(("Channel: %r" % channel).encode()),
                queue_name,
            )

    async with connection_pool, channel_pool:
        # NOTE(review): consume() iterates forever, so `await task` below
        # never returns — the example runs until interrupted.
        task = asyncio.create_task(consume())
        await asyncio.wait([asyncio.create_task(publish()) for _ in range(50)])
        await task


if __name__ == "__main__":
    asyncio.run(main())
49 |
--------------------------------------------------------------------------------
/docs/source/examples/rpc-callee.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import connect_robust
4 | from aio_pika.patterns import RPC
5 |
6 |
async def multiply(*, x: int, y: int) -> int:
    """Return the product of *x* and *y* (RPC-exposed, keyword-only)."""
    product = x * y
    return product
9 |
10 |
async def main() -> None:
    """Register the ``multiply`` RPC method and serve requests forever."""
    connection = await connect_robust(
        "amqp://guest:guest@127.0.0.1/",
        client_properties={"connection_name": "callee"},
    )

    # Creating channel
    channel = await connection.channel()

    # Expose `multiply` under the name "multiply"; auto_delete removes
    # its queue once the consumer goes away.
    rpc = await RPC.create(channel)
    await rpc.register("multiply", multiply, auto_delete=True)

    try:
        # An awaited bare Future never completes: park here until the
        # process is interrupted.
        await asyncio.Future()
    finally:
        await connection.close()


if __name__ == "__main__":
    asyncio.run(main())
31 |
--------------------------------------------------------------------------------
/docs/source/examples/rpc-caller.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import connect_robust
4 | from aio_pika.patterns import RPC
5 |
6 |
async def main() -> None:
    """Issue 2000 RPC calls to the remote ``multiply`` method."""
    connection = await connect_robust(
        "amqp://guest:guest@127.0.0.1/",
        client_properties={"connection_name": "caller"},
    )

    async with connection:
        channel = await connection.channel()
        rpc = await RPC.create(channel)

        # Calls via the attribute proxy ...
        for i in range(1000):
            print(await rpc.proxy.multiply(x=100, y=i))

        # ... and the equivalent explicit call() form.
        for i in range(1000):
            print(await rpc.call("multiply", kwargs={"x": 100, "y": i}))


if __name__ == "__main__":
    asyncio.run(main())
30 |
--------------------------------------------------------------------------------
/docs/source/examples/simple_async_consumer.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import aio_pika
4 |
5 |
async def process_message(
    message: aio_pika.abc.AbstractIncomingMessage,
) -> None:
    """Handle one delivery: print its body, then simulate a second of work.

    ``message.process()`` acks on clean exit and rejects if the block raises.
    """
    async with message.process():
        payload = message.body
        print(payload)
        await asyncio.sleep(1)
12 |
13 |
async def main() -> None:
    """Start a callback-based consumer and run until interrupted."""
    connection = await aio_pika.connect_robust(
        "amqp://guest:guest@127.0.0.1/",
    )

    queue_name = "test_queue"

    # Creating channel
    channel = await connection.channel()

    # Maximum message count which will be processing at the same time.
    await channel.set_qos(prefetch_count=100)

    # Declaring queue
    queue = await channel.declare_queue(queue_name, auto_delete=True)

    # Each delivery is dispatched to process_message.
    await queue.consume(process_message)

    try:
        # Wait until terminate
        await asyncio.Future()
    finally:
        await connection.close()


if __name__ == "__main__":
    asyncio.run(main())
41 |
--------------------------------------------------------------------------------
/docs/source/examples/simple_consumer.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 |
4 | import aio_pika
5 |
6 |
async def main() -> None:
    """Consume until a message whose body mentions the queue name arrives."""
    logging.basicConfig(level=logging.DEBUG)
    connection = await aio_pika.connect_robust(
        "amqp://guest:guest@127.0.0.1/",
    )

    queue_name = "test_queue"

    async with connection:
        channel = await connection.channel()

        # Prefetch at most 10 unacknowledged messages at a time.
        await channel.set_qos(prefetch_count=10)

        queue = await channel.declare_queue(queue_name, auto_delete=True)

        async with queue.iterator() as messages:
            async for message in messages:
                # process() acks on success, rejects on exception.
                async with message.process():
                    print(message.body)

                    # Stop consuming once the sentinel payload shows up.
                    if queue.name in message.body.decode():
                        break


if __name__ == "__main__":
    asyncio.run(main())
36 |
--------------------------------------------------------------------------------
/docs/source/examples/simple_publisher.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import aio_pika
4 |
5 |
async def main() -> None:
    """Publish a single greeting to the default exchange."""
    connection = await aio_pika.connect_robust(
        "amqp://guest:guest@127.0.0.1/",
    )

    async with connection:
        routing_key = "test_queue"
        channel = await connection.channel()

        # The default exchange routes by queue name, so the routing key
        # addresses the "test_queue" queue directly.
        await channel.default_exchange.publish(
            aio_pika.Message(body=f"Hello {routing_key}".encode()),
            routing_key=routing_key,
        )


if __name__ == "__main__":
    asyncio.run(main())
24 |
--------------------------------------------------------------------------------
/docs/source/examples/simple_publisher_transactions.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import aio_pika
4 |
5 |
async def main() -> None:
    """Publish messages inside AMQP transactions: commit and rollback.

    Shows the async-context-manager form (commit on success, rollback on
    error) and the manual select()/commit()/rollback() API.
    """
    connection = await aio_pika.connect_robust(
        "amqp://guest:guest@127.0.0.1/",
    )

    async with connection:
        routing_key = "test_queue"

        # Transactions conflicts with `publisher_confirms`
        channel = await connection.channel(publisher_confirms=False)

        # Use transactions with async context manager
        async with channel.transaction():
            # Publishing messages but delivery will not be done
            # before committing this transaction
            for i in range(10):
                message = aio_pika.Message(body="Hello #{}".format(i).encode())

                await channel.default_exchange.publish(
                    message, routing_key=routing_key,
                )

        # Using transactions manually
        tx = channel.transaction()

        # start transaction manually
        await tx.select()

        await channel.default_exchange.publish(
            aio_pika.Message(body="Hello {}".format(routing_key).encode()),
            routing_key=routing_key,
        )

        # Commit makes the message above visible to consumers.
        await tx.commit()

        # Using transactions manually
        tx = channel.transaction()

        # start transaction manually
        await tx.select()

        await channel.default_exchange.publish(
            aio_pika.Message(body="Should be rejected".encode()),
            routing_key=routing_key,
        )

        # Rollback discards the publish above; it is never delivered.
        await tx.rollback()


if __name__ == "__main__":
    asyncio.run(main())
57 |
--------------------------------------------------------------------------------
/docs/source/examples/tornado-pubsub.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import tornado.ioloop
4 | import tornado.web
5 |
6 | from aio_pika import Message, connect_robust
7 |
8 |
class Base:
    # Shared in-process queue bridging the AMQP consumer and the HTTP
    # handlers; assigned in make_app().
    QUEUE: asyncio.Queue
11 |
12 |
class SubscriberHandler(tornado.web.RequestHandler, Base):
    async def get(self) -> None:
        """Long-poll: wait for the next AMQP message and return its body."""
        message = await self.QUEUE.get()
        await self.finish(message.body)
17 |
18 |
class PublisherHandler(tornado.web.RequestHandler):
    async def post(self) -> None:
        """Publish the raw request body to the "test" queue."""
        connection = self.application.settings["amqp_connection"]
        channel = await connection.channel()

        try:
            await channel.default_exchange.publish(
                Message(body=self.request.body), routing_key="test",
            )
        finally:
            # Always release the per-request channel, even on failure.
            await channel.close()

        await self.finish("OK")
32 |
33 |
async def make_app() -> tornado.web.Application:
    """Connect to AMQP, start consuming into Base.QUEUE, and build the app."""
    amqp_connection = await connect_robust()

    channel = await amqp_connection.channel()
    queue = await channel.declare_queue("test", auto_delete=True)
    Base.QUEUE = asyncio.Queue()

    # no_ack=True: deliveries are pushed straight into the asyncio queue
    # without explicit acknowledgement.
    await queue.consume(Base.QUEUE.put, no_ack=True)

    return tornado.web.Application(
        [(r"/publish", PublisherHandler), (r"/subscribe", SubscriberHandler)],
        amqp_connection=amqp_connection,
    )
47 |
48 |
async def main() -> None:
    """Serve the app on port 8888 until the process is interrupted."""
    app = await make_app()
    app.listen(8888)
    # An awaited bare Future never resolves: keep the loop running.
    await asyncio.Future()


if __name__ == "__main__":
    asyncio.run(main())
57 |
--------------------------------------------------------------------------------
/docs/source/examples/worker.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import connect_robust
4 | from aio_pika.patterns import Master, NackMessage, RejectMessage
5 |
6 |
async def worker(*, task_id: int) -> None:
    """Example task handler demonstrating all three task outcomes.

    Raising RejectMessage / NackMessage makes the Master pattern reject or
    nack the delivery instead of acknowledging it.

    Bug fix: the original conditions (``% 2 == 0`` and ``% 2 == 1``)
    covered every integer, so the success path below was unreachable and
    no task was ever processed normally.
    """
    if task_id % 3 == 0:
        raise RejectMessage(requeue=False)

    if task_id % 3 == 1:
        raise NackMessage(requeue=False)

    # Normal completion: the delivery is acknowledged.
    print(task_id)
18 |
19 |
async def main() -> None:
    """Attach a worker for ``my_task_name`` and serve until cancelled."""
    connection = await connect_robust(
        "amqp://guest:guest@127.0.0.1/?name=aio-pika%20worker",
    )

    channel = await connection.channel()

    # Register the coroutine above as the handler for "my_task_name".
    master = Master(channel)
    await master.create_worker("my_task_name", worker, auto_delete=True)

    try:
        # Block forever; the consumer keeps running in the background.
        await asyncio.Future()
    finally:
        await connection.close()


if __name__ == "__main__":
    asyncio.run(main())
40 |
--------------------------------------------------------------------------------
/docs/source/patterns.rst:
--------------------------------------------------------------------------------
1 | .. _aio-pika: https://github.com/mosquito/aio-pika
2 |
3 |
4 | Patterns and helpers
5 | ++++++++++++++++++++
6 |
7 | .. note:: Available since `aio-pika>=1.7.0`
8 |
9 | `aio-pika`_ includes some useful patterns for creating distributed systems.
10 |
11 |
12 | .. _patterns-worker:
13 |
14 | Master/Worker
15 | ~~~~~~~~~~~~~
16 |
17 | Helper which implements Master/Worker pattern.
This is applicable for balancing tasks between multiple workers.
19 |
20 | The master creates tasks:
21 |
22 | .. literalinclude:: examples/master.py
23 | :language: python
24 |
25 |
26 | Worker code:
27 |
28 | .. literalinclude:: examples/worker.py
29 | :language: python
30 |
One or more workers execute the tasks.
32 |
33 |
34 | .. _patterns-rpc:
35 |
36 | RPC
37 | ~~~
38 |
39 | Helper which implements Remote Procedure Call pattern.
This is applicable for balancing tasks between multiple workers.
41 |
42 | The caller creates tasks and awaiting results:
43 |
44 | .. literalinclude:: examples/rpc-caller.py
45 | :language: python
46 |
47 |
48 | One or multiple callees executing tasks:
49 |
50 | .. literalinclude:: examples/rpc-callee.py
51 | :language: python
52 |
53 | Extending
54 | ~~~~~~~~~
55 |
The serialization behaviour of both patterns might be changed by inheriting
and redefining the methods :func:`aio_pika.patterns.base.serialize`
and :func:`aio_pika.patterns.base.deserialize`.
59 |
60 |
The following examples demonstrate it:
62 |
63 | .. literalinclude:: examples/extend-patterns.py
64 | :language: python
65 |
--------------------------------------------------------------------------------
/docs/source/quick-start.rst:
--------------------------------------------------------------------------------
1 | Quick start
2 | +++++++++++
3 |
4 | Some useful examples.
5 |
6 | Simple consumer
7 | ~~~~~~~~~~~~~~~
8 |
9 | .. literalinclude:: examples/simple_consumer.py
10 | :language: python
11 |
12 | Simple publisher
13 | ~~~~~~~~~~~~~~~~
14 |
15 | .. literalinclude:: examples/simple_publisher.py
16 | :language: python
17 |
18 | Asynchronous message processing
19 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
20 |
21 | .. literalinclude:: examples/simple_async_consumer.py
22 | :language: python
23 |
24 |
25 | Working with RabbitMQ transactions
26 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
27 |
28 | .. literalinclude:: examples/simple_publisher_transactions.py
29 | :language: python
30 |
31 | Get single message example
32 | ~~~~~~~~~~~~~~~~~~~~~~~~~~
33 |
34 | .. literalinclude:: examples/main.py
35 | :language: python
36 |
37 | Set logging level
38 | ~~~~~~~~~~~~~~~~~
39 |
Sometimes you want to see only your debug logs, but when you just call
`logging.basicConfig(level=logging.DEBUG)` you set the debug log level for all
loggers, including all of aio_pika's modules. If you want to set the logging
level independently, see the following example:
44 |
45 | .. literalinclude:: examples/log-level-set.py
46 | :language: python
47 |
48 | Tornado example
49 | ~~~~~~~~~~~~~~~
50 |
51 | .. literalinclude:: examples/tornado-pubsub.py
52 | :language: python
53 |
54 | External credentials example
55 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
56 |
57 | .. literalinclude:: examples/external-credentials.py
58 | :language: python
59 |
60 | Connection pooling
61 | ~~~~~~~~~~~~~~~~~~
62 |
63 | .. literalinclude:: examples/pooling.py
64 | :language: python
65 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/4-routing.rst:
--------------------------------------------------------------------------------
1 | .. _issue: https://github.com/mosquito/aio-pika/issues
2 | .. _pull request: https://github.com/mosquito/aio-pika/compare
3 | .. _aio-pika: https://github.com/mosquito/aio-pika
4 | .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-four-python.html
5 | .. _routing:
6 |
7 | Routing
8 | =======
9 |
10 | .. warning::
11 |
This is a beta version of the port from the `official tutorial`_. If you find
an error, please create an `issue`_ or a `pull request`_.
14 |
15 |
16 | .. note::
17 | Using the `aio-pika`_ async Python client
18 |
19 | .. note::
20 |
21 | **Prerequisites**
22 |
23 | This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`).
24 | In case you use a different host, port or credentials, connections settings would require adjusting.
25 |
26 | .. _installed: https://www.rabbitmq.com/download.html
27 |
28 | **Where to get help**
29 |
30 | If you're having trouble going through this tutorial you can `contact us`_ through the mailing list.
31 |
32 | .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users
33 |
34 |
In the :ref:`previous tutorial <publish-subscribe>` we built a simple logging system.
36 | We were able to broadcast log messages to many receivers.
37 |
38 | In this tutorial we're going to add a feature to it — we're going to make it possible to subscribe only to a subset
39 | of the messages. For example, we will be able to direct only critical error messages to the log
40 | file (to save disk space), while still being able to print all of the log messages on the console.
41 |
42 |
43 | Bindings
44 | ++++++++
45 |
46 | In previous examples we were already creating bindings. You may recall code like:
47 |
48 | .. code-block:: python
49 |
50 | async def main():
51 | ...
52 |
53 | # Binding the queue to the exchange
54 | await queue.bind(logs_exchange)
55 |
56 | ...
57 |
58 |
59 | A binding is a relationship between an exchange and a queue. This can be simply read as:
60 | the queue is interested in messages from this exchange.
61 |
62 | Bindings can take an extra *routing_key* parameter. To avoid the confusion with a
63 | *basic_publish* parameter we're going to call it a *binding key*.
64 | This is how we could create a binding with a key:
65 |
66 | .. code-block:: python
67 |
68 | async def main():
69 | ...
70 |
71 | # Binding the queue to the exchange
72 | await queue.bind(logs_exchange,
73 | routing_key="black")
74 |
75 | ...
76 |
77 |
78 | The meaning of a binding key depends on the exchange type. The *fanout* exchanges, which we
79 | used previously, simply ignored its value.
80 |
81 | Direct exchange
82 | +++++++++++++++
83 |
84 | Our logging system from the previous tutorial broadcasts all messages to all consumers.
85 | We want to extend that to allow filtering messages based on their severity. For example
86 | we may want the script which is writing log messages to the disk to only receive critical
87 | errors, and not waste disk space on warning or info log messages.
88 |
89 | We were using a fanout exchange, which doesn't give us too much flexibility — it's only
90 | capable of mindless broadcasting.
91 |
92 | We will use a direct exchange instead. The routing algorithm behind a direct exchange
93 | is simple — a message goes to the queues whose binding key exactly matches the routing key of the message.
94 |
95 | To illustrate that, consider the following setup:
96 |
97 | .. image:: /_static/tutorial/direct-exchange.svg
98 | :align: center
99 |
100 | In this setup, we can see the *direct* exchange X with two queues bound to it. The first queue is
101 | bound with binding key *orange*, and the second has two bindings, one with
102 | binding key *black* and the other one with *green*.
103 |
104 | In such a setup a message published to the exchange with a routing key *orange*
105 | will be routed to queue *Q1*. Messages with a routing key of *black* or *green* will go to *Q2*.
106 | All other messages will be discarded.
107 |
108 |
109 | Multiple bindings
110 | +++++++++++++++++
111 |
112 | .. image:: /_static/tutorial/direct-exchange-multiple.svg
113 | :align: center
114 |
115 | It is perfectly legal to bind multiple queues with the same binding key. In our
116 | example we could add a binding between *X* and *Q1* with binding key *black*. In that
117 | case, the *direct* exchange will behave like fanout and will broadcast the message
118 | to all the matching queues. A message with routing key black will be delivered to both *Q1* and *Q2*.
119 |
120 |
121 | Emitting logs
122 | +++++++++++++
123 |
124 | We'll use this model for our logging system. Instead of *fanout* we'll send messages to a *direct* exchange.
125 | We will supply the log severity as a *routing key*. That way the receiving script will be able to select
126 | the severity it wants to receive. Let's focus on emitting logs first.
127 |
128 | Like always we need to create an exchange first:
129 |
130 | .. code-block:: python
131 |
132 | from aio_pika import ExchangeType
133 |
134 | async def main():
135 | ...
136 |
137 | direct_logs_exchange = await channel.declare_exchange(
138 | 'logs', ExchangeType.DIRECT
139 | )
140 |
141 | And we're ready to send a message:
142 |
143 | .. code-block:: python
144 |
145 | async def main():
146 | ...
147 |
148 | await direct_logs_exchange.publish(
149 | Message(message_body),
150 | routing_key=severity,
151 | )
152 |
153 | To simplify things we will assume that `'severity'` can be one of `'info'`, `'warning'`, `'error'`.
154 |
155 | Subscribing
156 | +++++++++++
157 |
158 | Receiving messages will work just like in the previous tutorial, with one exception - we're
159 | going to create a new binding for each severity we're interested in.
160 |
161 |
162 | .. code-block:: python
163 |
164 | async def main():
165 | ...
166 |
167 | # Declaring queue
168 | queue = await channel.declare_queue(exclusive=True)
169 |
170 | # Binding the queue to the exchange
171 | await queue.bind(direct_logs_exchange,
172 | routing_key=severity)
173 |
174 | ...
175 |
176 |
177 | Putting it all together
178 | +++++++++++++++++++++++
179 |
180 | .. image:: /_static/tutorial/python-four.svg
181 | :align: center
182 |
The simplified code for :download:`receive_logs_direct_simple.py <examples/4-routing/receive_logs_direct_simple.py>`:
184 |
185 | .. literalinclude:: examples/4-routing/receive_logs_direct_simple.py
186 | :language: python
187 |
The code for :download:`emit_log_direct.py <examples/4-routing/emit_log_direct.py>`:
189 |
190 | .. literalinclude:: examples/4-routing/emit_log_direct.py
191 | :language: python
192 |
193 | .. note::
194 |
The callback-based code for :download:`receive_logs_direct.py <examples/4-routing/receive_logs_direct.py>`:
196 |
197 | .. literalinclude:: examples/4-routing/receive_logs_direct.py
198 | :language: python
199 |
200 |
201 | If you want to save only *'warning'* and *'error'* (and not *'info'*) log messages to a file,
202 | just open a console and type::
203 |
204 | $ python receive_logs_direct_simple.py warning error > logs_from_rabbit.log
205 |
206 | If you'd like to see all the log messages on your screen, open a new terminal and do::
207 |
208 | $ python receive_logs_direct.py info warning error
209 | [*] Waiting for logs. To exit press CTRL+C
210 |
211 | And, for example, to emit an error log message just type::
212 |
213 | $ python emit_log_direct.py error "Run. Run. Or it will explode."
214 | [x] Sent 'error':'Run. Run. Or it will explode.'
215 |
Move on to :ref:`tutorial 5 <topics>` to find out how to listen for messages based on a pattern.
217 |
218 |
219 | .. note::
220 |
221 | This material was adopted from `official tutorial`_ on **rabbitmq.org**.
222 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/5-topics.rst:
--------------------------------------------------------------------------------
1 | .. _issue: https://github.com/mosquito/aio-pika/issues
2 | .. _pull request: https://github.com/mosquito/aio-pika/compare
3 | .. _aio-pika: https://github.com/mosquito/aio-pika
4 | .. _syslog: http://en.wikipedia.org/wiki/Syslog
5 | .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-five-python.html
6 | .. _topics:
7 |
8 | Topics
9 | ======
10 |
11 | .. warning::
12 |
13 | This is a beta version of the port from the `official tutorial`_. If you find an
14 | error, please create an `issue`_ or a `pull request`_.
15 |
16 |
17 | .. note::
18 | Using the `aio-pika`_ async Python client
19 |
20 | .. note::
21 |
22 | **Prerequisites**
23 |
24 | This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`).
25 | In case you use a different host, port or credentials, connection settings would require adjusting.
26 |
27 | .. _installed: https://www.rabbitmq.com/download.html
28 |
29 | **Where to get help**
30 |
31 | If you're having trouble going through this tutorial you can `contact us`_ through the mailing list.
32 |
33 | .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users
34 |
35 |
36 | In the :ref:`previous tutorial <routing>` we improved our logging system. Instead of using a fanout
37 | exchange only capable of dummy broadcasting, we used a direct one, and gained a
38 | possibility of selectively receiving the logs.
39 |
40 | Although using the direct exchange improved our system, it still has limitations — it can't do routing based on
41 | multiple criteria.
42 |
43 | In our logging system we might want to subscribe to not only logs based on severity, but
44 | also based on the source which emitted the log. You might know this concept from the syslog_
45 | unix tool, which routes logs based on both severity (`info`/`warn`/`crit`...)
46 | and facility (`auth`/`cron`/`kern`...).
47 |
48 | That would give us a lot of flexibility - we may want to listen to just critical errors coming
49 | from 'cron' but also all logs from 'kern'.
50 |
51 | To implement that in our logging system we need to learn about a more complex topic exchange.
52 |
53 | Topic exchange
54 | ++++++++++++++
55 |
56 | Messages sent to a topic exchange can't have an arbitrary *routing_key* - it must be a list of words,
57 | delimited by dots. The words can be anything, but usually they specify some features connected to
58 | the message. A few valid routing key examples: `"stock.usd.nyse"`, `"nyse.vmw"`, `"quick.orange.rabbit"`.
59 | There can be as many words in the routing key as you like, up to the limit of 255 bytes.
60 |
61 | The binding key must also be in the same form. The logic behind the topic exchange is similar
62 | to a direct one - a message sent with a particular routing key will be delivered to all the
63 | queues that are bound with a matching binding key. However there are two important special
64 | cases for binding keys:
65 |
66 | * `*` (star) can substitute for exactly one word.
67 | * `#` (hash) can substitute for zero or more words.
68 |
69 | It's easiest to explain this in an example:
70 |
71 | .. image:: /_static/tutorial/python-five.svg
72 | :align: center
73 |
74 | In this example, we're going to send messages which all describe animals. The messages will be sent
75 | with a routing key that consists of three words (two dots). The first word in the routing key will
76 | describe a celerity, second a colour and third a species: `"<celerity>.<colour>.<species>"`.
77 |
78 | We created three bindings: *Q1* is bound with binding key `"*.orange.*"` and Q2 with `"*.*.rabbit"` and `"lazy.#"`.
79 |
80 | These bindings can be summarised as:
81 |
82 | * Q1 is interested in all the orange animals.
83 | * Q2 wants to hear everything about rabbits, and everything about lazy animals.
84 | * A message with a routing key set to `"quick.orange.rabbit"` will be delivered to both queues.
85 | Message `"lazy.orange.elephant"` also will go to both of them. On the other hand `"quick.orange.fox"` will only go to
86 | the first queue, and `"lazy.brown.fox"` only to the second. `"lazy.pink.rabbit"` will be delivered to the second
87 | queue only once, even though it matches two bindings. "quick.brown.fox" doesn't match any binding so it will be
88 | discarded.
89 |
90 | What happens if we break our contract and send a message with one or four words,
91 | like `"orange"` or `"quick.orange.male.rabbit"`? Well, these messages won't match any bindings and will be lost.
92 |
93 | On the other hand `"lazy.orange.male.rabbit"`, even though it has four words, will match the last binding and will be
94 | delivered to the second queue.
95 |
96 | .. note::
97 |
98 | **Topic exchange**
99 |
100 | Topic exchange is powerful and can behave like other exchanges.
101 |
102 | When a queue is bound with `"#"` (hash) binding key - it will receive all the messages, regardless of the routing
103 | key - like in fanout exchange.
104 |
105 | When special characters `"*"` (star) and `"#"` (hash) aren't used in bindings, the topic exchange will behave just
106 | like a direct one.
107 |
108 |
109 | Putting it all together
110 | +++++++++++++++++++++++
111 |
112 | We're going to use a topic exchange in our logging system. We'll start off with a working assumption
113 | that the routing keys of logs will have two words: `"<facility>.<severity>"`.
114 |
115 | The code is almost the same as in the :ref:`previous tutorial <routing>`.
116 |
117 | The code for :download:`emit_log_topic.py <examples/5-topics/emit_log_topic.py>`:
118 |
119 | .. literalinclude:: examples/5-topics/emit_log_topic.py
120 | :language: python
121 |
122 | The code for :download:`receive_logs_topic.py <examples/5-topics/receive_logs_topic.py>`:
123 |
124 | .. literalinclude:: examples/5-topics/receive_logs_topic.py
125 | :language: python
126 |
127 | To receive all the logs run::
128 |
129 | python receive_logs_topic.py "#"
130 |
131 | To receive all logs from the facility `"kern"`::
132 |
133 | python receive_logs_topic.py "kern.*"
134 |
135 | Or if you want to hear only about `"critical"` logs::
136 |
137 | python receive_logs_topic.py "*.critical"
138 |
139 | You can create multiple bindings::
140 |
141 | python receive_logs_topic.py "kern.*" "*.critical"
142 |
143 | And to emit a log with a routing key `"kern.critical"` type::
144 |
145 | python emit_log_topic.py "kern.critical" "A critical kernel error"
146 |
147 | Have fun playing with these programs. Note that the code doesn't make any assumption
148 | about the routing or binding keys, you may want to play with more than two routing key parameters.
149 |
150 | Move on to :ref:`tutorial 6 <rpc>` to learn about RPC.
151 |
152 |
153 | .. note::
154 |
155 | This material was adopted from `official tutorial`_ on **rabbitmq.org**.
156 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/7-publisher-confirms.rst:
--------------------------------------------------------------------------------
1 | .. _issue: https://github.com/mosquito/aio-pika/issues
2 | .. _pull request: https://github.com/mosquito/aio-pika/compare
3 | .. _aio-pika: https://github.com/mosquito/aio-pika
4 | .. _official tutorial: https://www.rabbitmq.com/tutorials/tutorial-seven-php.html
5 | .. _publisher-confirms:
6 |
7 | Publisher Confirms
8 | ==================
9 |
10 | .. warning::
11 |
12 | This is a beta version of the port from the `official tutorial`_. If you find an
13 | error, please create an `issue`_ or a `pull request`_.
14 |
15 |
16 | .. note::
17 | Using the `aio-pika`_ async Python client
18 |
19 |
20 | .. note::
21 |
22 | **Prerequisites**
23 |
24 | This tutorial assumes RabbitMQ is installed_ and running on localhost on standard port (`5672`).
25 | In case you use a different host, port or credentials, connection settings would require adjusting.
26 |
27 | .. _installed: https://www.rabbitmq.com/download.html
28 |
29 | **Where to get help**
30 |
31 | If you're having trouble going through this tutorial you can `contact us`_ through the mailing list.
32 |
33 | .. _contact us: https://groups.google.com/forum/#!forum/rabbitmq-users
34 |
35 |
36 | `Publisher confirms <https://www.rabbitmq.com/confirms.html#publisher-confirms>`_ are a RabbitMQ
37 | extension to implement reliable publishing.
38 | When publisher confirms are enabled on a channel, messages the client publishes are confirmed
39 | asynchronously by the broker, meaning they have been taken care of on the server side.
40 |
41 | Overview
42 | ++++++++
43 |
44 | In this tutorial we're going to use publisher confirms to make sure published messages have safely reached the broker.
45 | We will cover several strategies to using publisher confirms and explain their pros and cons.
46 |
47 | Enabling Publisher Confirms on a Channel
48 | ++++++++++++++++++++++++++++++++++++++++
49 |
50 | Publisher confirms are a RabbitMQ extension to the AMQP 0.9.1 protocol.
51 | Publisher confirms are enabled at the channel level by setting the :code:`publisher_confirms` parameter to :code:`True`,
52 | which is the default.
53 |
54 | .. code-block:: python
55 |
56 | channel = await connection.channel(
57 | publisher_confirms=True, # This is the default
58 | )
59 |
60 | Strategy #1: Publishing Messages Individually
61 | +++++++++++++++++++++++++++++++++++++++++++++
62 |
63 | Let's start with the simplest approach to publishing with confirms, that is, publishing a message and
64 | waiting synchronously for its confirmation:
65 |
66 | .. literalinclude:: examples/7-publisher-confirms/publish_individually.py
67 | :language: python
68 | :start-at: # Sending the messages
69 | :end-before: # Done sending messages
70 |
71 | In the previous example we publish a message as usual and wait for its confirmation with the :code:`await` keyword.
72 | The :code:`await` returns as soon as the message has been confirmed.
73 | If the message is not confirmed within the timeout or if it is nack-ed (meaning the broker could not take care of it for
74 | some reason), the :code:`await` will throw an exception.
75 | The :code:`on_return_raises` parameter of :code:`aio_pika.connect()` and :code:`connection.channel()` controls this behavior if a mandatory
76 | message is returned.
77 | The handling of the exception usually consists in logging an error message and/or retrying to send the message.
78 |
79 | Different client libraries have different ways to synchronously deal with publisher confirms, so make sure to read
80 | carefully the documentation of the client you are using.
81 |
82 | This technique is very straightforward but also has a major drawback: it **significantly slows down publishing**, as the
83 | confirmation of a message blocks the publishing of all subsequent messages.
84 | This approach is not going to deliver throughput of more than a few hundreds of published messages per second.
85 | Nevertheless, this can be good enough for some applications.
86 |
87 | Strategy #2: Publishing Messages in Batches
88 | +++++++++++++++++++++++++++++++++++++++++++
89 |
90 | To improve upon our previous example, we can publish a batch of messages and wait for this whole batch to be confirmed.
91 | The following example uses a batch of 100:
92 |
93 | .. literalinclude:: examples/7-publisher-confirms/publish_batches.py
94 | :language: python
95 | :start-at: batchsize = 100
96 | :end-before: # Done sending messages
97 |
98 | Waiting for a batch of messages to be confirmed improves throughput drastically over waiting for a confirm for individual
99 | message (up to 20-30 times with a remote RabbitMQ node).
100 | One drawback is that we do not know exactly what went wrong in case of failure, so we may have to keep a whole batch in memory
101 | to log something meaningful or to re-publish the messages.
102 | And this solution is still synchronous, so it blocks the publishing of messages.
103 |
104 | .. note::
105 |
106 | To initiate message sending asynchronously, a task is created with :code:`asyncio.create_task`, so the execution of our function
107 | is handled by the event-loop.
108 | The :code:`await asyncio.sleep(0)` is required to make the event loop switch to our coroutine.
109 | Any :code:`await` would have sufficed, though.
110 | Using :code:`async for` with an :code:`async` generator also requires the generator to yield control flow with :code:`await` for message
111 | sending to be initiated.
112 |
113 | Without the task and the :code:`await` the message sending would only be initiated with the :code:`asyncio.gather` call.
114 | For some applications this behavior might be acceptable.
115 |
116 |
117 | Strategy #3: Handling Publisher Confirms Asynchronously
118 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++
119 |
120 | The broker confirms published messages asynchronously, our helper function will publish the messages and be notified of these confirms:
121 |
122 | .. literalinclude:: examples/7-publisher-confirms/publish_asynchronously.py
123 | :language: python
124 | :start-at: # List for storing tasks
125 | :end-at: await asyncio.gather(*tasks)
126 |
127 | In Python 3.11 a :code:`TaskGroup` can be used instead of the :code:`list` with :code:`asyncio.gather`.
128 |
129 | The helper function publishes the message and awaits the confirmation.
130 | This way the helper function knows which message the confirmation, timeout or rejection belongs to.
131 |
132 | .. literalinclude:: examples/7-publisher-confirms/publish_asynchronously.py
133 | :language: python
134 | :pyobject: publish_and_handle_confirm
135 |
136 |
137 | Summary
138 | +++++++
139 |
140 | Making sure published messages made it to the broker can be essential in some applications.
141 | Publisher confirms are a RabbitMQ feature that helps to meet this requirement.
142 | Publisher confirms are asynchronous in nature but it is also possible to handle them synchronously.
143 | There is no definitive way to implement publisher confirms, this usually comes down to the constraints in the application
144 | and in the overall system. Typical techniques are:
145 |
146 | * publishing messages individually, waiting for the confirmation synchronously: simple, but very limited throughput.
147 | * publishing messages in batch, waiting for the confirmation synchronously for a batch: simple, reasonable throughput, but hard to reason about when something goes wrong.
148 | * asynchronous handling: best performance and use of resources, good control in case of error, but can be involved to implement correctly.
149 |
150 |
151 |
152 | .. note::
153 |
154 | This material was adopted from `official tutorial`_ on **rabbitmq.org**.
155 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/1-introduction/receive.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import connect
4 | from aio_pika.abc import AbstractIncomingMessage
5 |
6 |
async def on_message(message: AbstractIncomingMessage) -> None:
    """Print an incoming message, then simulate slow asynchronous work.

    A consumer callback does not have to be ``async``; this one is,
    purely to demonstrate that awaiting inside a callback works.
    """
    for line in (
        " [x] Received message %r" % message,
        "Message body is: %r" % message.body,
        "Before sleep!",
    ):
        print(line)
    await asyncio.sleep(5)  # Stand-in for real async I/O operations
    print("After sleep!")
18 |
19 |
async def main() -> None:
    """Open a connection, declare the ``hello`` queue and consume forever."""
    conn = await connect("amqp://guest:guest@localhost/")
    async with conn:
        # A channel multiplexes work over the single TCP connection.
        chan = await conn.channel()

        # Idempotently declare the queue we read from.
        hello_queue = await chan.declare_queue("hello")

        # no_ack=True: the broker considers deliveries acknowledged
        # as soon as they are sent.
        await hello_queue.consume(on_message, no_ack=True)

        print(" [*] Waiting for messages. To exit press CTRL+C")
        # Park forever so the consumer keeps running until cancelled.
        await asyncio.Future()


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
39 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/1-introduction/send.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import Message, connect
4 |
5 |
async def main() -> None:
    """Publish a single "Hello World!" message to the ``hello`` queue."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()

        # Declare the target queue first so the message is not lost
        # when no consumer has created it yet.
        target = await chan.declare_queue("hello")

        # The default exchange routes by queue name.
        await chan.default_exchange.publish(
            Message(b"Hello World!"),
            routing_key=target.name,
        )

        print(" [x] Sent 'Hello World!'")


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
28 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/2-work-queues/new_task.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import DeliveryMode, Message, connect
5 |
6 |
async def main() -> None:
    """Publish one persistent task, built from the CLI, to ``task_queue``."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()

        # The payload is the space-joined CLI arguments, or a default.
        words = [word.encode() for word in sys.argv[1:]]
        message_body = b" ".join(words) or b"Hello World!"

        # PERSISTENT asks the broker to write the message to disk.
        message = Message(
            message_body, delivery_mode=DeliveryMode.PERSISTENT,
        )

        await chan.default_exchange.publish(
            message, routing_key="task_queue",
        )

        print(f" [x] Sent {message!r}")


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
33 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/2-work-queues/new_task_initial.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import Message, connect
5 |
6 |
async def main() -> None:
    """Send a single message, assembled from the CLI, to ``hello``."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()

        # Payload: space-joined CLI arguments, or a default greeting.
        words = [word.encode() for word in sys.argv[1:]]
        message_body = b" ".join(words) or b"Hello World!"

        await chan.default_exchange.publish(
            Message(message_body),
            routing_key="hello",
        )

        print(f" [x] Sent {message_body!r}")


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
30 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/2-work-queues/tasks_worker.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import connect
4 | from aio_pika.abc import AbstractIncomingMessage
5 |
6 |
async def on_message(message: AbstractIncomingMessage) -> None:
    """Process one task: sleep one second per dot in the body.

    ``message.process()`` acknowledges on success and rejects when
    the body raises.
    """
    async with message.process():
        print(f" [x] Received message {message!r}")
        # Simulate work: every '.' in the payload costs one second.
        duration = message.body.count(b'.')
        await asyncio.sleep(duration)
        print(f"     Message body is: {message.body!r}")
12 |
13 |
async def main() -> None:
    """Consume tasks from the durable ``task_queue``, one at a time."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()
        # Hand this worker only one unacknowledged message at a time.
        await chan.set_qos(prefetch_count=1)

        # A durable queue survives broker restarts.
        task_queue = await chan.declare_queue(
            "task_queue",
            durable=True,
        )

        await task_queue.consume(on_message)

        print(" [*] Waiting for messages. To exit press CTRL+C")
        # Park forever so the consumer keeps running until cancelled.
        await asyncio.Future()


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
38 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/emit_log.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import DeliveryMode, ExchangeType, Message, connect
5 |
6 |
async def main() -> None:
    """Broadcast one persistent log line via the fanout ``logs`` exchange."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()

        # Fanout: every bound queue gets a copy of each message.
        logs_exchange = await chan.declare_exchange(
            "logs", ExchangeType.FANOUT,
        )

        words = [word.encode() for word in sys.argv[1:]]
        message_body = b" ".join(words) or b"Hello World!"

        message = Message(
            message_body,
            delivery_mode=DeliveryMode.PERSISTENT,
        )

        # The routing key is ignored by a fanout exchange.
        await logs_exchange.publish(message, routing_key="info")

        print(f" [x] Sent {message!r}")


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
36 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/receive_logs.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from aio_pika import ExchangeType, connect
4 | from aio_pika.abc import AbstractIncomingMessage
5 |
6 |
async def on_message(message: AbstractIncomingMessage) -> None:
    """Print the body of one broadcast log message, acking on success."""
    async with message.process():
        print(f"[x] {message.body!r}")
10 |
11 |
async def main() -> None:
    """Bind a throwaway queue to the fanout ``logs`` exchange and listen."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()
        await chan.set_qos(prefetch_count=1)

        logs_exchange = await chan.declare_exchange(
            "logs", ExchangeType.FANOUT,
        )

        # Server-named, exclusive queue: deleted when we disconnect.
        log_queue = await chan.declare_queue(exclusive=True)

        # A fanout binding needs no routing key.
        await log_queue.bind(logs_exchange)

        await log_queue.consume(on_message)

        print(" [*] Waiting for logs. To exit press CTRL+C")
        # Park forever so the consumer keeps running until cancelled.
        await asyncio.Future()


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
40 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/4-routing/emit_log_direct.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import DeliveryMode, ExchangeType, Message, connect
5 |
6 |
async def main() -> None:
    """Publish one log message with a severity routing key.

    Usage: ``emit_log_direct.py [severity] [message words ...]``.
    Defaults to severity ``info`` and body ``Hello World!``.
    """
    # Perform connection
    connection = await connect("amqp://guest:guest@localhost/")

    async with connection:
        # Creating a channel
        channel = await connection.channel()

        logs_exchange = await channel.declare_exchange(
            "logs", ExchangeType.DIRECT,
        )

        # Message text is everything after the severity argument.
        message_body = b" ".join(
            arg.encode() for arg in sys.argv[2:]
        ) or b"Hello World!"

        message = Message(
            message_body,
            delivery_mode=DeliveryMode.PERSISTENT,
        )

        # Use the first CLI argument as the severity whenever it is given.
        # (The previous `len(sys.argv) > 2` test silently ignored a lone
        # severity argument and published with "info" instead.)
        routing_key = sys.argv[1] if len(sys.argv) > 1 else "info"
        await logs_exchange.publish(message, routing_key=routing_key)

        print(f" [x] Sent {message.body!r}")


if __name__ == "__main__":
    asyncio.run(main())
37 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/4-routing/receive_logs_direct.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import ExchangeType, connect
5 | from aio_pika.abc import AbstractIncomingMessage
6 |
7 |
async def on_message(message: AbstractIncomingMessage) -> None:
    """Print one routed log line as severity:body, acking on success."""
    async with message.process():
        print(" [x] %r:%r" % (message.routing_key, message.body))
11 |
12 |
async def main() -> None:
    """Consume log messages for the severities given on the command line."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()
        await chan.set_qos(prefetch_count=1)

        severities = sys.argv[1:]

        # At least one severity is required; otherwise print usage and exit.
        if not severities:
            sys.stderr.write(
                "Usage: %s [info] [warning] [error]\n" % sys.argv[0],
            )
            sys.exit(1)

        # Declare the direct exchange shared with the emitter.
        direct_logs_exchange = await chan.declare_exchange(
            "logs", ExchangeType.DIRECT,
        )

        # Server-named queue. NOTE(review): the tutorial text binds an
        # exclusive queue here, while this example declares durable=True —
        # confirm which is intended.
        queue = await chan.declare_queue(durable=True)

        # One binding per requested severity.
        for severity in severities:
            await queue.bind(direct_logs_exchange, routing_key=severity)

        await queue.consume(on_message)

        print(" [*] Waiting for messages. To exit press CTRL+C")
        # Park forever so the consumer keeps running until cancelled.
        await asyncio.Future()


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
50 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/4-routing/receive_logs_direct_simple.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import ExchangeType, connect
5 | from aio_pika.abc import AbstractIncomingMessage
6 |
7 |
async def main() -> None:
    """Iterate log messages for the severities given on the command line."""
    conn = await connect("amqp://guest:guest@localhost/")

    async with conn:
        chan = await conn.channel()
        await chan.set_qos(prefetch_count=1)

        severities = sys.argv[1:]

        # At least one severity is required; otherwise print usage and exit.
        if not severities:
            sys.stderr.write(
                f"Usage: {sys.argv[0]} [info] [warning] [error]\n",
            )
            sys.exit(1)

        # Declare the direct exchange shared with the emitter.
        direct_logs_exchange = await chan.declare_exchange(
            "logs", ExchangeType.DIRECT,
        )

        queue = await chan.declare_queue(durable=True)

        # One binding per requested severity.
        for severity in severities:
            await queue.bind(direct_logs_exchange, routing_key=severity)

        # Pull messages with an async iterator instead of a callback.
        async with queue.iterator() as iterator:
            message: AbstractIncomingMessage
            async for message in iterator:
                async with message.process():
                    print(f" [x] {message.routing_key!r}:{message.body!r}")

            # NOTE(review): the iterator above does not end normally, so
            # the two statements below appear unreachable — confirm.
            print(" [*] Waiting for messages. To exit press CTRL+C")
            await asyncio.Future()


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
48 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/5-topics/emit_log_topic.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import DeliveryMode, ExchangeType, Message, connect
5 |
6 |
async def main() -> None:
    """Publish one persistent message to the ``topic_logs`` exchange."""
    conn = await connect(
        "amqp://guest:guest@localhost/",
    )

    async with conn:
        chan = await conn.channel()

        topic_logs_exchange = await chan.declare_exchange(
            "topic_logs", ExchangeType.TOPIC,
        )

        # First argument is the routing key when a message body follows.
        routing_key = sys.argv[1] if len(sys.argv) > 2 else "anonymous.info"

        # Payload: space-joined remaining arguments, or a default.
        words = [word.encode() for word in sys.argv[2:]]
        message_body = b" ".join(words) or b"Hello World!"

        message = Message(
            message_body,
            delivery_mode=DeliveryMode.PERSISTENT,
        )

        await topic_logs_exchange.publish(message, routing_key=routing_key)

        print(f" [x] Sent {message!r}")


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
40 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/5-topics/receive_logs_topic.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 |
4 | from aio_pika import ExchangeType, connect
5 | from aio_pika.abc import AbstractIncomingMessage
6 |
7 |
async def main() -> None:
    """Consume messages whose topic matches the given binding keys."""
    # NOTE(review): this example never closes the connection explicitly.
    conn = await connect("amqp://guest:guest@localhost/")

    chan = await conn.channel()
    await chan.set_qos(prefetch_count=1)

    # Declare the topic exchange shared with the emitter.
    topic_logs_exchange = await chan.declare_exchange(
        "topic_logs", ExchangeType.TOPIC,
    )

    queue = await chan.declare_queue(
        "task_queue", durable=True,
    )

    binding_keys = sys.argv[1:]

    # At least one binding key is required; otherwise print usage and exit.
    if not binding_keys:
        sys.stderr.write("Usage: %s [binding_key]...\n" % sys.argv[0])
        sys.exit(1)

    # Bind once per requested pattern ('*' = one word, '#' = zero or more).
    for binding_key in binding_keys:
        await queue.bind(topic_logs_exchange, routing_key=binding_key)

    print(" [*] Waiting for messages. To exit press CTRL+C")

    # Pull messages with an async iterator instead of a callback.
    async with queue.iterator() as iterator:
        message: AbstractIncomingMessage
        async for message in iterator:
            async with message.process():
                print(f" [x] {message.routing_key!r}:{message.body!r}")


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
47 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/6-rpc/rpc_client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import uuid
3 | from typing import MutableMapping
4 |
5 | from aio_pika import Message, connect
6 | from aio_pika.abc import (
7 | AbstractChannel, AbstractConnection, AbstractIncomingMessage, AbstractQueue,
8 | )
9 |
10 |
class FibonacciRpcClient:
    """RPC client: sends numbers to ``rpc_queue`` and awaits replies.

    Replies are matched to requests via the AMQP ``correlation_id``
    property on a per-client exclusive callback queue.
    """

    connection: AbstractConnection
    channel: AbstractChannel
    callback_queue: AbstractQueue

    def __init__(self) -> None:
        # Maps correlation_id -> future resolved with the reply body.
        self.futures: MutableMapping[str, asyncio.Future] = {}

    async def connect(self) -> "FibonacciRpcClient":
        """Open connection/channel and start consuming the callback queue."""
        self.connection = await connect("amqp://guest:guest@localhost/")
        self.channel = await self.connection.channel()
        self.callback_queue = await self.channel.declare_queue(exclusive=True)
        await self.callback_queue.consume(self.on_response, no_ack=True)

        return self

    async def on_response(self, message: AbstractIncomingMessage) -> None:
        """Resolve the pending future matching the reply's correlation id."""
        if message.correlation_id is None:
            print(f"Bad message {message!r}")
            return

        # NOTE(review): pop() raises KeyError for an unknown or duplicate
        # correlation id, which would break this consumer callback — confirm
        # whether stray replies are possible in the deployment.
        future: asyncio.Future = self.futures.pop(message.correlation_id)
        future.set_result(message.body)

    async def call(self, n: int) -> int:
        """Send *n* to ``rpc_queue`` and return the server's reply as int."""
        correlation_id = str(uuid.uuid4())
        loop = asyncio.get_running_loop()
        future = loop.create_future()

        # Register the future before publishing so an immediate reply
        # cannot arrive without a matching entry.
        self.futures[correlation_id] = future

        await self.channel.default_exchange.publish(
            Message(
                str(n).encode(),
                content_type="text/plain",
                correlation_id=correlation_id,
                reply_to=self.callback_queue.name,
            ),
            routing_key="rpc_queue",
        )

        # Block until on_response() resolves the future with the reply body.
        return int(await future)
53 |
54 |
async def main() -> None:
    """Ask the RPC server for fib(30) and print the reply."""
    client = await FibonacciRpcClient().connect()
    print(" [x] Requesting fib(30)")
    result = await client.call(30)
    print(f" [.] Got {result!r}")


# Entry point: start the event loop only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
64 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/6-rpc/rpc_server.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 |
4 | from aio_pika import Message, connect
5 | from aio_pika.abc import AbstractIncomingMessage
6 |
7 |
def fib(n: int) -> int:
    """Return the n-th Fibonacci number.

    Naive double recursion — exponential time by design, so the RPC
    server has a visibly slow task to serve.
    """
    if n in (0, 1):
        return n
    return fib(n - 1) + fib(n - 2)
15 |
16 |
async def main() -> None:
    """Serve fib() over RPC: consume rpc_queue, reply to ``reply_to``."""
    connection = await connect("amqp://guest:guest@localhost/")
    channel = await connection.channel()
    exchange = channel.default_exchange
    queue = await channel.declare_queue("rpc_queue")

    print(" [x] Awaiting RPC requests")

    async with queue.iterator() as iterator:
        message: AbstractIncomingMessage
        async for message in iterator:
            try:
                # requeue=False: a poison message must not loop forever.
                async with message.process(requeue=False):
                    assert message.reply_to is not None

                    n = int(message.body.decode())
                    print(f" [.] fib({n})")

                    await exchange.publish(
                        Message(
                            body=str(fib(n)).encode(),
                            correlation_id=message.correlation_id,
                        ),
                        routing_key=message.reply_to,
                    )
                    print("Request complete")
            except Exception:
                logging.exception("Processing error for message %r", message)


if __name__ == "__main__":
    asyncio.run(main())
56 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_asynchronously.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import Generator
3 |
4 | from aio_pika import Message, connect
5 | from aiormq.exceptions import DeliveryError
6 | from pamqp.commands import Basic
7 |
8 | from aio_pika.abc import AbstractExchange
9 |
10 |
def get_messages_to_publish() -> Generator[bytes, None, None]:
    """Yield 10000 numbered "Hello World" payloads as bytes."""
    yield from (f"Hello World {i}!".encode() for i in range(10000))
14 |
15 |
async def publish_and_handle_confirm(
    exchange: AbstractExchange,
    queue_name: str,
    message_body: bytes,
) -> None:
    """Publish one message and report how the broker confirmed it.

    Prints a diagnostic on delivery failure, timeout, or a non-Ack
    confirmation; completes silently on a successful Ack.

    :param exchange: exchange to publish through (default exchange here).
    :param queue_name: routing key (queue name for the default exchange).
    :param message_body: raw payload bytes.
    """
    try:
        confirmation = await exchange.publish(
            Message(message_body),
            routing_key=queue_name,
            timeout=5.0,
        )
    except DeliveryError as e:
        print(f"Delivery of {message_body!r} failed with exception: {e}")
    except asyncio.TimeoutError:
        # asyncio.TimeoutError is NOT the builtin TimeoutError before
        # Python 3.11, so catch the asyncio name to support 3.9/3.10
        # (the project declares python = "^3.9"). On 3.11+ they are the
        # same class, so this is correct everywhere.
        print(f"Timeout occurred for {message_body!r}")
    else:
        if not isinstance(confirmation, Basic.Ack):
            print(f"Message {message_body!r} was not acknowledged by broker!")
34 |
35 |
async def main() -> None:
    """Publish 10000 messages concurrently, confirming each delivery."""
    connection = await connect("amqp://guest:guest@localhost/")

    async with connection:
        channel = await connection.channel()
        queue = await channel.declare_queue("hello")

        # One task per message; confirmations are awaited concurrently.
        tasks = []
        for body in get_messages_to_publish():
            tasks.append(
                asyncio.create_task(
                    publish_and_handle_confirm(
                        channel.default_exchange,
                        queue.name,
                        body,
                    ),
                ),
            )
            # Yield to the event loop so each send is initiated right away.
            await asyncio.sleep(0)

        # Wait until every confirmation has been handled.
        await asyncio.gather(*tasks)

        print(" [x] Sent and confirmed multiple messages asynchronously. ")


if __name__ == "__main__":
    asyncio.run(main())
70 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_batches.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import Generator
3 |
4 | from aio_pika import Message, connect
5 |
6 |
def get_messages_to_publish() -> Generator[bytes, None, None]:
    """Yield 10000 numbered "Hello World" payloads as bytes."""
    yield from (f"Hello World {i}!".encode() for i in range(10000))
10 |
11 |
async def main() -> None:
    """Publish 10000 messages, awaiting confirmations in batches of 100."""
    connection = await connect("amqp://guest:guest@localhost/")

    async with connection:
        channel = await connection.channel()
        queue = await channel.declare_queue("hello")

        batch_size = 100
        pending = []

        for body in get_messages_to_publish():
            pending.append(
                asyncio.create_task(
                    channel.default_exchange.publish(
                        Message(body),
                        routing_key=queue.name,
                        timeout=5.0,
                    ),
                ),
            )
            # Yield to the event loop so the send is initiated right away.
            await asyncio.sleep(0)

            if len(pending) == batch_size:
                # Block until the whole batch is confirmed by the broker.
                await asyncio.gather(*pending)
                pending.clear()

        # Confirm whatever is left from the final partial batch.
        if pending:
            await asyncio.gather(*pending)
            pending.clear()

        print(" [x] Sent and confirmed multiple messages in batches. ")


if __name__ == "__main__":
    asyncio.run(main())
54 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/examples/7-publisher-confirms/publish_individually.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import Generator
3 |
4 | from aio_pika import Message, connect
5 |
6 |
def get_messages_to_publish() -> Generator[bytes, None, None]:
    """Yield 10000 numbered "Hello World" payloads as bytes."""
    yield from (f"Hello World {i}!".encode() for i in range(10000))
10 |
11 |
async def main() -> None:
    """Publish 10000 messages, waiting for each broker confirmation."""
    connection = await connect("amqp://guest:guest@localhost/")

    async with connection:
        channel = await connection.channel()
        queue = await channel.declare_queue("hello")

        for body in get_messages_to_publish():
            # Publisher confirmation (with timeout) for every single message.
            await channel.default_exchange.publish(
                Message(body),
                routing_key=queue.name,
                timeout=5.0,
            )

        print(" [x] Sent and confirmed multiple messages individually. ")


if __name__ == "__main__":
    asyncio.run(main())
37 |
--------------------------------------------------------------------------------
/docs/source/rabbitmq-tutorial/index.rst:
--------------------------------------------------------------------------------
1 | RabbitMQ tutorial
2 | =================
3 |
4 | .. toctree::
5 | :glob:
6 | :maxdepth: 3
7 | :caption: RabbitMQ tutorial adopted for aio-pika
8 |
9 | *-*
10 |
--------------------------------------------------------------------------------
/gray.conf:
--------------------------------------------------------------------------------
1 | formatters = add-trailing-comma,isort,unify
2 | min-python-version = 3.7
3 | log-level = error
4 |
--------------------------------------------------------------------------------
/noxfile.py:
--------------------------------------------------------------------------------
1 | import nox
2 | from nox import Session
3 |
4 |
@nox.session
def docs(session: Session) -> None:
    """Build the Sphinx docs; pass ``serve`` to run sphinx-autobuild instead."""
    session.install(".")
    session.install("sphinx", "sphinx-autobuild")
    session.run("rm", "-rf", "build/html", external=True)

    sphinx_args = ["-W", "docs/source", "build/html"]
    builder = (
        "sphinx-autobuild" if "serve" in session.posargs else "sphinx-build"
    )
    session.run(builder, *sphinx_args)
16 |
--------------------------------------------------------------------------------
/poetry.toml:
--------------------------------------------------------------------------------
1 | cache-dir = ".cache"
2 |
3 | [virtualenvs]
4 | path = ".venv"
5 | in-project = true
6 |
7 | [installer]
8 | modern-installation = false
9 |
--------------------------------------------------------------------------------
/pylama.ini:
--------------------------------------------------------------------------------
1 | [pylama]
2 | linters = mccabe,pycodestyle,pyflakes
3 | skip = *env*,.tox*,*build*,.*,env/*,.venv/*
4 | ignore = C901
5 |
6 | [pylama:pycodestyle]
7 | max_line_length = 80
8 | show-pep8 = True
9 | show-source = True
10 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "aio-pika"
3 | version = "9.5.5"
4 | description = "Wrapper around the aiormq for asyncio and humans"
5 | authors = ["Dmitry Orlov <me@mosquito.su>"]
6 | readme = "README.rst"
7 | license = "Apache-2.0"
8 | keywords=["rabbitmq", "asyncio", "amqp", "amqp 0.9.1", "aiormq"]
9 | homepage = "https://github.com/mosquito/aio-pika"
10 | classifiers = [
11 | "Intended Audience :: Developers",
12 | "License :: OSI Approved :: Apache Software License",
13 | "Natural Language :: English",
14 | "Operating System :: MacOS",
15 | "Operating System :: Microsoft",
16 | "Operating System :: POSIX",
17 | "Programming Language :: Python :: 3",
18 | "Programming Language :: Python :: 3.9",
19 | "Programming Language :: Python :: 3.10",
20 | "Programming Language :: Python :: 3.11",
21 | "Programming Language :: Python :: 3.12",
22 | "Programming Language :: Python :: Implementation :: CPython",
23 | "Programming Language :: Python :: Implementation :: PyPy",
24 | "Programming Language :: Python",
25 | "Topic :: Internet",
26 | "Topic :: Software Development :: Libraries",
27 | "Topic :: Software Development",
28 | "Typing :: Typed",
29 | ]
30 | packages = [{ include = "aio_pika" }]
31 |
32 | [tool.poetry.urls]
33 | "Source" = "https://github.com/mosquito/aio-pika"
34 | "Tracker" = "https://github.com/mosquito/aio-pika/issues"
35 | "Documentation" = "https://docs.aio-pika.com/"
36 |
37 | [tool.poetry.dependencies]
38 | python = "^3.9"
39 | aiormq = "~6.8"
40 | yarl = [{ version = '*'}]
41 | exceptiongroup = "^1"
42 | typing-extensions = [{ version = '*', python = "< 3.10" }]
43 |
44 | [tool.poetry.group.dev.dependencies]
45 | aiomisc = "^17.5"
46 | aiomisc-pytest = "^1.1.1"
47 | collective-checkdocs = "^0.2"
48 | coverage = "^6.5.0"
49 | coveralls = "^3.3.1"
50 | mypy = "^1"
51 | nox = "*"
52 | pylama = "^8.4.1"
53 | pytest = "^8.0"
54 | pytest-cov = "^4.0.0"
55 | pytest-rst = "^0.0"
56 | shortuuid = "^1.0"
57 | sphinx = "*"
58 | sphinx-autobuild = "^2021.3.14"
59 | timeout-decorator = "^0.5.0"
60 | types-setuptools = "^65.6.0.2"
61 | typing-extensions = "*"
62 | setuptools = "^69.0.3"
63 | testcontainers = "^3.7.1"
64 | autodoc = "*"
65 | furo = "*"
66 | sphinxcontrib-googleanalytics = "*"
67 |
68 | [tool.poetry.group.uvloop]
69 | optional = true
70 |
71 | [tool.poetry.group.uvloop.dependencies]
72 | uvloop = "^0.19"
73 |
74 | [build-system]
75 | requires = ["poetry-core"]
76 | build-backend = "poetry.core.masonry.api"
77 |
78 | [tool.mypy]
79 | check_untyped_defs = true
80 | disallow_any_generics = false
81 | disallow_incomplete_defs = true
82 | disallow_subclassing_any = true
83 | disallow_untyped_calls = true
84 | disallow_untyped_decorators = true
85 | disallow_untyped_defs = true
86 | follow_imports = "silent"
87 | no_implicit_reexport = true
88 | strict_optional = true
89 | warn_redundant_casts = true
90 | warn_unused_configs = true
91 | warn_unused_ignores = true
92 | files = [
93 | "aio_pika",
94 | "tests",
95 | "docs/source/examples",
96 | "docs/source/rabbitmq-tutorial/examples/1-introduction",
97 | "docs/source/rabbitmq-tutorial/examples/2-work-queues",
98 | "docs/source/rabbitmq-tutorial/examples/3-publish-subscribe",
99 | "docs/source/rabbitmq-tutorial/examples/4-routing",
100 | "docs/source/rabbitmq-tutorial/examples/5-topics",
101 | "docs/source/rabbitmq-tutorial/examples/6-rpc",
102 | ]
103 |
104 | [[tool.mypy.overrides]]
105 | module = ["tests.*"]
106 | check_untyped_defs = true
107 | disallow_incomplete_defs = false
108 | disallow_untyped_calls = false
109 | disallow_untyped_decorators = false
110 | disallow_untyped_defs = false
111 | warn_unused_ignores = false
112 |
113 | [[tool.mypy.overrides]]
114 | module = ["testcontainers.*"]
115 | ignore_missing_imports = true
116 |
117 | [tool.pytest.ini_options]
118 | log_cli = true
119 | addopts = "-p no:asyncio"
120 | markers = [
121 | "asyncio: asyncio"
122 | ]
123 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | import shortuuid
4 |
5 |
def get_random_name(*args: Any) -> str:
    """Build a unique dotted name for test entities.

    Joins the literal ``"test"``, the given parts, and a random short
    uuid with dots, e.g. ``get_random_name("queue") ->
    "test.queue.<shortuuid>"``.
    """
    # Sequence unpacking replaces the original append loop.
    return ".".join(["test", *args, shortuuid.uuid()])
13 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import gc
3 | import socket
4 | import tracemalloc
5 | from contextlib import suppress
6 | from functools import partial
7 | from time import sleep
8 | from typing import Any, Generator
9 |
10 | import aiormq
11 | import pamqp
12 | import pytest
13 | from aiomisc import awaitable
14 | from testcontainers.core.container import DockerContainer
15 | from yarl import URL
16 |
17 | import aio_pika
18 |
19 |
@pytest.fixture
async def add_cleanup(event_loop):
    """Collect cleanup callables and run them in reverse order on teardown."""
    registered = []

    def payload(func, *args, **kwargs):
        # awaitable() lets callers register plain functions or coroutines.
        registered.append(partial(awaitable(func), *args, **kwargs))

    try:
        yield payload
    finally:
        for func in reversed(registered):
            await func()

        registered.clear()


@pytest.fixture
async def create_task(event_loop):
    """Spawn tracked tasks; cancel anything still running on teardown."""
    tasks = []

    def payload(coroutine):
        task = event_loop.create_task(coroutine)
        tasks.append(task)
        return task

    try:
        yield payload
    finally:
        pending = [task for task in tasks if not task.done()]
        for task in pending:
            task.cancel()

        results = await asyncio.gather(*pending, return_exceptions=True)

        # Anything other than the expected CancelledError is a real failure.
        for result in results:
            if not isinstance(result, asyncio.CancelledError):
                raise result
63 |
64 |
class RabbitmqContainer(DockerContainer):  # type: ignore
    """RabbitMQ test container exposing AMQP (5672) and AMQPS (5671)."""

    # Host-side ports, recorded once the container is up.
    _amqp_port: int
    _amqps_port: int

    def get_amqp_url(self) -> URL:
        """Return the plain AMQP URL for the running container."""
        return URL.build(
            scheme="amqp", user="guest", password="guest", path="//",
            host=self.get_container_host_ip(),
            port=self._amqp_port,
        )

    def get_amqps_url(self) -> URL:
        """Return the TLS (AMQPS) URL for the running container."""
        return URL.build(
            scheme="amqps", user="guest", password="guest", path="//",
            host=self.get_container_host_ip(),
            port=self._amqps_port,
        )

    def readiness_probe(self) -> None:
        """Block until the broker answers an AMQP protocol handshake."""
        host = self.get_container_host_ip()
        port = int(self.get_exposed_port(5672))
        while True:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                try:
                    sock.connect((host, port))
                    # AMQP 0-9-1 protocol header. The previous literal used
                    # "\0x0" (a NUL byte followed by the characters "x0")
                    # instead of the "\x00" escape; the probe only worked
                    # because the broker replies to malformed headers too.
                    sock.send(b"AMQP\x00\x00\x09\x01")
                    data = sock.recv(4)
                    if len(data) != 4:
                        sleep(0.3)
                        continue
                except ConnectionError:
                    sleep(0.3)
                    continue
                return

    def start(self) -> "RabbitmqContainer":
        """Start the container, wait for readiness, and record host ports."""
        self.with_exposed_ports(5672, 5671)
        super().start()
        self.readiness_probe()
        self._amqp_port = int(self.get_exposed_port(5672))
        self._amqps_port = int(self.get_exposed_port(5671))
        return self
107 |
108 |
@pytest.fixture(scope="module")
def rabbitmq_container() -> Generator[RabbitmqContainer, Any, Any]:
    """Run one RabbitMQ container for the whole test module."""
    with RabbitmqContainer("mosquito/aiormq-rabbitmq") as container:
        yield container


@pytest.fixture(scope="module")
def amqp_direct_url(request, rabbitmq_container: RabbitmqContainer) -> URL:
    """Module-scoped AMQP URL tagged with the requesting test's node id."""
    return rabbitmq_container.get_amqp_url().update_query(
        name=request.node.nodeid,
    )


@pytest.fixture
def amqp_url(request, amqp_direct_url) -> URL:
    """Per-test copy of the module URL with a refreshed ``name`` query arg."""
    query = dict(amqp_direct_url.query, name=request.node.nodeid)
    return amqp_direct_url.with_query(**query)


@pytest.fixture(
    scope="module",
    params=[aio_pika.connect, aio_pika.connect_robust],
    ids=["connect", "connect_robust"],
)
def connection_fabric(request):
    """Parametrize tests over plain and robust connection factories."""
    return request.param


@pytest.fixture
def create_connection(connection_fabric, event_loop, amqp_url):
    """Connection factory pre-bound to the test URL and event loop."""
    return partial(connection_fabric, amqp_url, loop=event_loop)


@pytest.fixture
def create_channel(connection: aio_pika.Connection, add_cleanup):
    """Channel factory; closes created channels on teardown by default."""
    default_connection = connection

    async def fabric(cleanup=True, connection=None, *args, **kwargs):
        conn = connection if connection is not None else default_connection
        channel = await conn.channel(*args, **kwargs)
        if cleanup:
            add_cleanup(channel.close)
        return channel

    return fabric
160 |
161 |
# noinspection PyTypeChecker
@pytest.fixture
async def connection(create_connection) -> aio_pika.Connection:  # type: ignore
    """Open a connection for the test and close it afterwards."""
    async with await create_connection() as conn:
        yield conn


# noinspection PyTypeChecker
@pytest.fixture
async def channel(  # type: ignore
    connection: aio_pika.Connection,
) -> aio_pika.Channel:
    """Open a channel on the test connection and close it afterwards."""
    async with connection.channel() as ch:
        yield ch


@pytest.fixture
def declare_queue(connection, channel, add_cleanup):
    """Queue factory; schedules deletion unless the queue auto-deletes."""
    default_channel = channel

    async def fabric(
        *args, cleanup=True, channel=None, **kwargs,
    ) -> aio_pika.Queue:
        ch = channel if channel is not None else default_channel
        queue = await ch.declare_queue(*args, **kwargs)

        if cleanup and not kwargs.get("auto_delete"):
            add_cleanup(queue.delete)

        return queue

    return fabric


@pytest.fixture
def declare_exchange(connection, channel, add_cleanup):
    """Exchange factory; schedules deletion unless the exchange auto-deletes."""
    default_channel = channel

    async def fabric(
        *args, channel=None, cleanup=True, **kwargs,
    ) -> aio_pika.Exchange:
        ch = channel if channel is not None else default_channel
        exchange = await ch.declare_exchange(*args, **kwargs)

        if cleanup and not kwargs.get("auto_delete"):
            add_cleanup(exchange.delete)

        return exchange

    return fabric
220 |
221 |
@pytest.fixture(autouse=True)
def memory_tracer():
    """Fail a test if aiormq/pamqp/aio_pika allocations survive it."""
    tracemalloc.start()
    tracemalloc.clear_traces()

    # Only trace allocations made inside the library packages under test.
    filters = (
        tracemalloc.Filter(True, aiormq.__file__),
        tracemalloc.Filter(True, pamqp.__file__),
        tracemalloc.Filter(True, aio_pika.__file__),
    )

    snapshot_before = tracemalloc.take_snapshot().filter_traces(filters)

    try:
        yield

        with suppress(Exception):
            gc.collect()

        snapshot_after = tracemalloc.take_snapshot().filter_traces(filters)

        top_stats = snapshot_after.compare_to(
            snapshot_before, "lineno", cumulative=True,
        )

        # Any surviving allocation in the traced packages is a leak.
        assert not top_stats
    finally:
        tracemalloc.stop()
250 |
--------------------------------------------------------------------------------
/tests/test_amqp_robust.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 |
4 | import pytest
5 | from aiormq import ChannelNotFoundEntity
6 | from aiormq.exceptions import ChannelPreconditionFailed
7 |
8 | import aio_pika
9 | from aio_pika import RobustChannel
10 | from tests import get_random_name
11 | from tests.test_amqp import (
12 | TestCaseAmqp, TestCaseAmqpNoConfirms, TestCaseAmqpWithConfirms,
13 | )
14 |
15 |
@pytest.fixture
def connection_fabric():
    """Force the robust connection factory for this module."""
    return aio_pika.connect_robust


@pytest.fixture
def create_connection(connection_fabric, event_loop, amqp_url):
    """Robust-connection factory bound to the test URL and event loop."""
    return partial(connection_fabric, amqp_url, loop=event_loop)
24 |
25 |
class TestCaseNoRobust(TestCaseAmqp):
    """Runs the base AMQP suite with robust entities toggled on and off."""

    PARAMS = [{"robust": True}, {"robust": False}]
    IDS = ["robust=1", "robust=0"]

    @staticmethod
    @pytest.fixture(name="declare_queue", params=PARAMS, ids=IDS)
    def declare_queue_(request, declare_queue):
        """Override declare_queue, forcing the robust flag per param."""
        async def fabric(*args, **kwargs) -> aio_pika.Queue:
            kwargs.update(request.param)
            return await declare_queue(*args, **kwargs)

        return fabric

    @staticmethod
    @pytest.fixture(name="declare_exchange", params=PARAMS, ids=IDS)
    def declare_exchange_(request, declare_exchange):
        """Override declare_exchange, forcing the robust flag per param."""
        async def fabric(*args, **kwargs) -> aio_pika.Queue:
            kwargs.update(request.param)
            return await declare_exchange(*args, **kwargs)

        return fabric

    async def test_add_reconnect_callback(self, create_connection):
        """Reconnect callbacks must be kept alive by strong references."""
        connection = await create_connection()

        def cb(*a, **kw):
            pass

        connection.reconnect_callbacks.add(cb)
        del cb

        assert len(connection.reconnect_callbacks) == 1

    async def test_channel_blocking_timeout_reopen(self, connection):
        """A server-side channel close fires callbacks, then a reopen."""
        channel: RobustChannel = await connection.channel()  # type: ignore

        reasons = []
        closed = asyncio.Event()
        reopened = asyncio.Event()
        channel.reopen_callbacks.add(lambda *_: reopened.set())

        queue_name = get_random_name("test_channel_blocking_timeout_reopen")

        def on_done(*args):
            reasons.append(args)
            closed.set()
            return

        channel.close_callbacks.add(on_done)

        # Passive declare of a missing queue closes the channel server-side.
        with pytest.raises(ChannelNotFoundEntity):
            await channel.declare_queue(queue_name, passive=True)

        await closed.wait()
        assert channel.is_closed

        # Ensure the close callback ran and recorded the failure reason.
        assert reasons

        # The robust channel must come back on its own and be usable again.
        await asyncio.wait_for(reopened.wait(), timeout=60)
        await channel.declare_queue(queue_name, auto_delete=True)

    async def test_get_queue_fail(self, connection):
        """Mismatched redeclare fails the same way before and after reopen."""
        channel: RobustChannel = await connection.channel()  # type: ignore
        closed = asyncio.Event()
        reopened = asyncio.Event()
        channel.close_callbacks.add(lambda *_: closed.set())
        channel.reopen_callbacks.add(lambda *_: reopened.set())

        name = get_random_name("passive", "queue")

        await channel.declare_queue(
            name,
            auto_delete=True,
            arguments={"x-max-length": 1},
        )

        with pytest.raises(ChannelPreconditionFailed):
            await channel.declare_queue(name, auto_delete=True)

        await asyncio.sleep(0)
        await closed.wait()
        await reopened.wait()

        with pytest.raises(ChannelPreconditionFailed):
            await channel.declare_queue(name, auto_delete=True)

    async def test_channel_is_ready_after_close_and_reopen(self, connection):
        """reopen() restores readiness after an explicit close()."""
        channel: RobustChannel = await connection.channel()  # type: ignore
        await channel.ready()
        await channel.close()
        assert channel.is_closed is True

        await channel.reopen()
        await asyncio.wait_for(channel.ready(), timeout=1)

        assert channel.is_closed is False

    async def test_channel_can_be_closed(self, connection):
        """After close(), ready() never resolves; the channel stays closed."""
        channel: RobustChannel = await connection.channel()  # type: ignore
        await channel.ready()
        await channel.close()

        assert channel.is_closed

        with pytest.raises(asyncio.TimeoutError):
            await asyncio.wait_for(channel.ready(), timeout=1)

        assert channel.is_closed
132 |
class TestCaseAmqpNoConfirmsRobust(TestCaseAmqpNoConfirms):
    """Re-run the no-confirms suite over robust connections."""


class TestCaseAmqpWithConfirmsRobust(TestCaseAmqpWithConfirms):
    """Re-run the publisher-confirms suite over robust connections."""
139 |
--------------------------------------------------------------------------------
/tests/test_amqps.py:
--------------------------------------------------------------------------------
1 | import ssl
2 | from functools import partial
3 |
4 | import pytest
5 |
6 | import aio_pika
7 | from tests import test_amqp as amqp
8 |
9 |
@pytest.fixture(
    scope="module", params=[aio_pika.connect, aio_pika.connect_robust],
)
def connection_fabric(request):
    """Parametrize over plain and robust connection factories."""
    return request.param


@pytest.fixture
def create_connection(connection_fabric, event_loop, rabbitmq_container):
    """AMQPS connection factory with certificate verification disabled."""
    # The broker uses a self-signed test certificate: skip hostname and
    # chain verification so the TLS handshake itself can be exercised.
    ssl_context = ssl.create_default_context()
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.VerifyMode.CERT_NONE

    return partial(
        connection_fabric,
        rabbitmq_container.get_amqps_url(),
        loop=event_loop,
        ssl_context=ssl_context,
    )
29 |
30 |
async def test_default_context(connection_fabric, amqp_url):
    """Verifying SSL contexts must reject the self-signed broker cert."""
    amqps_url = amqp_url.with_scheme("amqps").with_port(5671)

    # No explicit context: the library's default still verifies the cert.
    with pytest.raises(ConnectionError):
        await connection_fabric(amqps_url, ssl_context=None)

    # An explicit default context verifies the certificate as well.
    with pytest.raises(ConnectionError):
        await connection_fabric(
            amqps_url, ssl_context=ssl.create_default_context(),
        )
45 |
46 |
class TestCaseAMQPS(amqp.TestCaseAmqp):
    """Run the full AMQP suite over the TLS connection fixtures."""
49 |
--------------------------------------------------------------------------------
/tests/test_connect.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import pytest
4 | from yarl import URL
5 |
6 | from aio_pika import connect
7 |
8 |
9 | VARIANTS = (
10 | (dict(url="amqp://localhost/"), "amqp://localhost/"),
11 | (dict(url="amqp://localhost"), "amqp://localhost/"),
12 | (dict(url="amqp://localhost:5674"), "amqp://localhost:5674/"),
13 | (dict(url="amqp://localhost:5674//"), "amqp://localhost:5674//"),
14 | (dict(url="amqp://localhost:5674/"), "amqp://localhost:5674/"),
15 | (dict(host="localhost", port=8888), "amqp://guest:guest@localhost:8888//"),
16 | (
17 | dict(host="localhost", port=8888, virtualhost="foo"),
18 | "amqp://guest:guest@localhost:8888/foo",
19 | ),
20 | (
21 | dict(host="localhost", port=8888, virtualhost="/foo"),
22 | "amqp://guest:guest@localhost:8888//foo",
23 | ),
24 | )
25 |
26 |
class FakeConnection:
    """Connection stand-in that records its URL instead of connecting."""

    def __init__(self, url, **kwargs):
        self.url = URL(url)
        self.kwargs = kwargs

    async def connect(self, timeout=None, **kwargs):
        # Deliberate no-op: the tests only inspect URL normalization.
        return
34 |
35 |
@pytest.mark.parametrize("kwargs,expected", VARIANTS)
def test_simple(kwargs, expected):
    """connect() must normalize both URL and host/port keyword forms."""
    # asyncio.run replaces the deprecated get_event_loop() +
    # run_until_complete() pattern (DeprecationWarning when no loop is
    # running since Python 3.10); FakeConnection holds no loop state,
    # so a fresh loop per parametrized case is safe.
    conn: FakeConnection = asyncio.run(
        connect(connection_class=FakeConnection, **kwargs),  # type: ignore
    )

    assert conn.url == URL(expected)
44 |
--------------------------------------------------------------------------------
/tests/test_connection_params.py:
--------------------------------------------------------------------------------
1 | from typing import Type
2 |
3 | from aiormq.connection import parse_bool, parse_int, parse_timeout
4 | from yarl import URL
5 |
6 | from aio_pika import connect
7 | from aio_pika.abc import AbstractConnection
8 | from aio_pika.connection import Connection
9 | from aio_pika.robust_connection import RobustConnection, connect_robust
10 |
11 |
class MockConnection(Connection):
    """Connection whose connect() is a no-op returning itself."""

    async def connect(self, timeout=None, **kwargs):
        return self


class MockConnectionRobust(RobustConnection):
    """Robust connection whose connect() is a no-op returning itself."""

    async def connect(self, timeout=None, **kwargs):
        return self
20 |
21 |
22 | VALUE_GENERATORS = {
23 | parse_int: {
24 | "-1": -1,
25 | "0": 0,
26 | "43": 43,
27 | "9999999999999999": 9999999999999999,
28 | "hello": 0,
29 | },
30 | parse_bool: {
31 | "disabled": False,
32 | "enable": True,
33 | "yes": True,
34 | "no": False,
35 | "": False,
36 | },
37 | parse_timeout: {
38 | "0": 0,
39 | "Vasyan": 0,
40 | "0.1": 0.1,
41 | "0.54": 0.54,
42 | "1": 1,
43 | "100": 100,
44 | "1000:": 0,
45 | },
46 | float: {
47 | "0": 0.,
48 | "0.0": 0.,
49 | ".0": 0.,
50 | "0.1": 0.1,
51 | "1": 1.,
52 | "hello": None,
53 | },
54 | }
55 |
56 |
class TestCase:
    """Checks URL-query and kwargs parameter parsing for Connection."""

    CONNECTION_CLASS: Type[AbstractConnection] = MockConnection

    async def get_instance(self, url, **kwargs) -> AbstractConnection:
        """Build a mock connection without touching the network."""
        return await connect(  # type: ignore
            url, connection_class=self.CONNECTION_CLASS, **kwargs,
        )

    async def test_kwargs(self):
        """Defaults: every declared parameter materializes on the instance."""
        instance = await self.get_instance("amqp://localhost/")

        for parameter in self.CONNECTION_CLASS.PARAMETERS:
            if parameter.is_kwarg:
                continue

            assert hasattr(instance, parameter.name)
            assert (
                getattr(instance, parameter.name) is
                parameter.parse(parameter.default)
            )

    async def test_kwargs_values(self):
        """Each sample value parses identically from URL query and kwargs."""
        for parameter in self.CONNECTION_CLASS.PARAMETERS:
            samples = VALUE_GENERATORS[parameter.parser]  # type: ignore
            for raw, expected in samples.items():  # type: ignore
                # Value supplied through the URL query string.
                instance = await self.get_instance(
                    f"amqp://localhost/?{parameter.name}={raw}",
                )

                assert parameter.parse(raw) == expected

                if parameter.is_kwarg:
                    assert instance.kwargs[parameter.name] == expected
                else:
                    assert hasattr(instance, parameter.name)
                    assert getattr(instance, parameter.name) == expected

                # Same value supplied as a keyword argument.
                instance = await self.get_instance(
                    "amqp://localhost", **{parameter.name: raw},
                )
                assert hasattr(instance, parameter.name)
                assert getattr(instance, parameter.name) == expected
99 |
100 |
class TestCaseRobust(TestCase):
    """Same parameter checks against the robust connection class."""

    CONNECTION_CLASS: Type[MockConnectionRobust] = MockConnectionRobust

    async def get_instance(self, url, **kwargs) -> AbstractConnection:
        """Build a robust mock connection without touching the network."""
        return await connect_robust(  # type: ignore
            url,
            connection_class=self.CONNECTION_CLASS,  # type: ignore
            **kwargs,
        )
110 |
111 |
def _assert_query_kwarg(conn_cls, amqp_url: URL, param, raw, expected):
    """Assert the URL query parameter lands, parsed, in connection.kwargs."""
    connection = conn_cls(url=amqp_url.update_query(**{param: raw}))
    assert param in connection.kwargs
    assert connection.kwargs[param] == expected

    # Without the query argument the kwarg must be absent entirely.
    connection = conn_cls(url=amqp_url)
    assert param not in connection.kwargs


def test_connection_interleave(amqp_url: URL):
    _assert_query_kwarg(Connection, amqp_url, "interleave", "1", 1)


def test_connection_happy_eyeballs_delay(amqp_url: URL):
    _assert_query_kwarg(
        Connection, amqp_url, "happy_eyeballs_delay", ".1", 0.1,
    )


def test_robust_connection_interleave(amqp_url: URL):
    _assert_query_kwarg(RobustConnection, amqp_url, "interleave", "1", 1)


def test_robust_connection_happy_eyeballs_delay(amqp_url: URL):
    _assert_query_kwarg(
        RobustConnection, amqp_url, "happy_eyeballs_delay", ".1", 0.1,
    )
150 |
--------------------------------------------------------------------------------
/tests/test_master.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import Any, List
3 |
4 | import aio_pika
5 | from aio_pika.patterns.master import (
6 | CompressedJsonMaster, JsonMaster, Master, NackMessage, RejectMessage,
7 | )
8 |
9 |
10 | class TestMaster:
11 | MASTER_CLASS = Master
12 |
async def test_simple(self, channel: aio_pika.Channel):
    """A worker registered via create_worker receives proxied kwargs."""
    master = self.MASTER_CLASS(channel)
    done = asyncio.Event()

    self.state: List[Any] = []

    def worker_func(*, foo, bar):
        self.state.append((foo, bar))
        done.set()

    worker = await master.create_worker(
        "worker.foo", worker_func, auto_delete=True,
    )

    await master.proxy.worker.foo(foo=1, bar=2)
    await done.wait()

    assert self.state == [(1, 2)]

    await worker.close()
35 |
36 | async def test_simple_coro(self, channel: aio_pika.Channel):
37 | master = self.MASTER_CLASS(channel)
38 | event = asyncio.Event()
39 |
40 | self.state = []
41 |
42 | async def worker_func(*, foo, bar):
43 | nonlocal event
44 | self.state.append((foo, bar))
45 | event.set()
46 |
47 | worker = await master.create_worker(
48 | "worker.foo", worker_func, auto_delete=True,
49 | )
50 |
51 | await master.proxy.worker.foo(foo=1, bar=2)
52 |
53 | await event.wait()
54 |
55 | assert self.state == [(1, 2)]
56 |
57 | await worker.close()
58 |
59 | async def test_simple_many(self, channel: aio_pika.Channel):
60 | master = self.MASTER_CLASS(channel)
61 | tasks = 100
62 |
63 | state = []
64 |
65 | def worker_func(*, foo):
66 | nonlocal tasks, state
67 |
68 | state.append(foo)
69 | tasks -= 1
70 |
71 | worker = await master.create_worker(
72 | "worker.foo", worker_func, auto_delete=True,
73 | )
74 |
75 | for item in range(100):
76 | await master.proxy.worker.foo(foo=item)
77 |
78 | while tasks > 0:
79 | await asyncio.sleep(0)
80 |
81 | assert state == list(range(100))
82 |
83 | await worker.close()
84 |
85 | async def test_exception_classes(self, channel: aio_pika.Channel):
86 | master = self.MASTER_CLASS(channel)
87 | counter = 200
88 |
89 | self.state = []
90 |
91 | def worker_func(*, foo):
92 | nonlocal counter
93 | counter -= 1
94 |
95 | if foo < 50:
96 | raise RejectMessage(requeue=False)
97 | if foo > 100:
98 | raise NackMessage(requeue=False)
99 |
100 | self.state.append(foo)
101 |
102 | worker = await master.create_worker(
103 | "worker.foo", worker_func, auto_delete=True,
104 | )
105 |
106 | for item in range(200):
107 | await master.proxy.worker.foo(foo=item)
108 |
109 | while counter > 0:
110 | await asyncio.sleep(0)
111 |
112 | assert self.state == list(range(50, 101))
113 |
114 | await worker.close()
115 |
116 |
class TestJsonMaster(TestMaster):
    """Re-run every TestMaster scenario with JSON payload serialization."""
    MASTER_CLASS = JsonMaster
119 |
120 |
class TestCompressedJsonMaster(TestMaster):
    """Re-run every TestMaster scenario with compressed-JSON serialization."""
    MASTER_CLASS = CompressedJsonMaster
123 |
--------------------------------------------------------------------------------
/tests/test_memory_leak.py:
--------------------------------------------------------------------------------
1 | import gc
2 | import weakref
3 | from typing import AbstractSet
4 |
5 | import aio_pika
6 |
7 |
async def test_leak_unclosed_channel(create_connection):
    """Channels that were opened but never explicitly closed must become
    garbage-collectable once the connection is closed (no reference leak
    inside the connection object)."""
    rabbitmq_connection = await create_connection()

    # Fixed annotation: ``typing.AbstractSet`` is the *read-only* set
    # protocol and has no ``add()``; the object really is a WeakSet.
    weakset: "weakref.WeakSet[aio_pika.abc.AbstractChannel]" = weakref.WeakSet()

    async def f(rabbitmq_connection: aio_pika.Connection, weakset):
        # Open a channel, keep only a weak reference, never close it.
        weakset.add(await rabbitmq_connection.channel())

    async with rabbitmq_connection:
        for i in range(5):
            await f(rabbitmq_connection, weakset)

    # After the connection context exits, nothing should keep the channels
    # alive; collect and verify the weak references all died.
    gc.collect()

    assert len(tuple(weakset)) == 0
23 |
24 |
async def test_leak_closed_channel(create_connection):
    """Channels that were explicitly closed must become garbage-collectable
    once the connection is closed."""
    rabbitmq_connection = await create_connection()

    # Fixed annotation: the set holds *channels* (not connections, as the
    # old annotation claimed), and ``typing.AbstractSet`` has no ``add()``
    # — the object really is a WeakSet.
    weakset: "weakref.WeakSet[aio_pika.abc.AbstractChannel]" = weakref.WeakSet()

    async def f(rabbitmq_connection: aio_pika.Connection, weakset):
        # Open a channel, track it weakly, and close it on block exit.
        async with rabbitmq_connection.channel() as channel:
            weakset.add(channel)

    async with rabbitmq_connection:
        for i in range(5):
            await f(rabbitmq_connection, weakset)

    # Closed channels must not be retained anywhere; all weakrefs die.
    gc.collect()

    assert len(tuple(weakset)) == 0
41 |
--------------------------------------------------------------------------------
/tests/test_message.py:
--------------------------------------------------------------------------------
1 | import time
2 | from copy import copy
3 | from datetime import datetime, timezone
4 | from typing import List, Tuple
5 |
6 | import shortuuid
7 |
8 | from aio_pika import DeliveryMode, Message
9 | from aio_pika.abc import FieldValue, HeadersType, MessageInfo
10 |
11 |
def test_message_copy():
    """copy() yields an independent message: locking the original must not
    lock the copy."""
    original = Message(
        bytes(shortuuid.uuid(), "utf-8"),
        content_type="application/json",
        content_encoding="text",
        timestamp=datetime(2000, 1, 1),
        headers={"h1": "v1", "h2": "v2"},
    )
    duplicate = copy(original)

    original.lock()

    assert not duplicate.locked
25 |
26 |
def test_message_info():
    """Message.info() must reflect every property given at construction."""
    payload = bytes(shortuuid.uuid(), "utf-8")

    # Properties accepted by both the expected MessageInfo and the Message
    # constructor — defined once so both sides are guaranteed to agree.
    common = dict(
        app_id="test",
        content_encoding="text",
        content_type="application/json",
        correlation_id="1",
        delivery_mode=DeliveryMode.PERSISTENT,
        expiration=1.5,
        headers={"foo": "bar"},
        message_id=shortuuid.uuid(),
        priority=0,
        reply_to="test",
        timestamp=datetime.fromtimestamp(int(time.time()), tz=timezone.utc),
        type="0",
        user_id="guest",
    )

    expected = MessageInfo(
        body_size=len(payload),
        cluster_id=None,
        consumer_tag=None,
        delivery_tag=None,
        exchange=None,
        redelivered=None,
        routing_key=None,
        **common,
    )

    msg = Message(body=payload, **common)

    assert expected == msg.info()
71 |
72 |
def test_headers_setter():
    """Assigning to ``headers`` replaces the previous mapping entirely."""
    replacement: HeadersType = {"foo": "bar"}
    expected = {"foo": "bar"}

    msg = Message(b"", headers={"bar": "baz"})
    msg.headers = replacement

    assert msg.headers == expected
81 |
82 |
def test_headers_content():
    """Header values survive the round trip through a Message unchanged."""
    cases: Tuple[List[FieldValue], ...] = (
        [42, 42],
        [b"foo", b"foo"],
        [b"\00", b"\00"],
    )

    for given, expected in cases:
        msg = Message(b"", headers={"value": given})
        assert msg.headers["value"] == expected
93 |
94 |
def test_headers_set():
    """Items set through the ``headers`` mapping read back unchanged and
    leave pre-existing headers intact."""
    msg = Message(b"", headers={"header": "value"})

    cases = (
        ["header-1", 42, 42],
        ["header-2", b"foo", b"foo"],
        ["header-3", b"\00", b"\00"],
        ["header-4", {"foo": "bar"}, {"foo": "bar"}],
    )

    for name, given, expected in cases:  # type: ignore
        msg.headers[name] = given  # type: ignore
        assert msg.headers[name] == expected  # type: ignore

    # The original header must still be present and untouched.
    assert msg.headers["header"] == "value"
110 |
--------------------------------------------------------------------------------
/tests/test_pool.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from collections import Counter
3 |
4 | import pytest
5 |
6 | from aio_pika.pool import Pool, PoolInstance
7 |
8 |
@pytest.mark.parametrize("max_size", [50, 10, 5, 1])
async def test_simple(max_size, event_loop):
    """The pool creates at most ``max_size`` instances even when far more
    concurrent acquisitions are in flight."""
    created = 0

    async def create_instance():
        nonlocal created
        await asyncio.sleep(0)
        created += 1
        return created

    pool: Pool = Pool(create_instance, max_size=max_size, loop=event_loop)

    async def getter():
        nonlocal created, pool

        async with pool.acquire() as instance:
            assert instance > 0
            # Hold the instance while the pool is still warming up so the
            # pool is forced to create up to max_size instances.
            await asyncio.sleep(1 if created < max_size else 0)
            return instance, created

    results = await asyncio.gather(*(getter() for _ in range(200)))

    for instance, total in results:
        assert instance > -1
        assert total > -1

    assert created == max_size
36 |
37 |
class TestInstanceBase:
    """Shared fixtures for pool tests that track every created instance."""

    class Instance(PoolInstance):
        """Minimal poolable object that may be closed exactly once."""

        def __init__(self):
            self.closed = False

        async def close(self):
            # Closing twice would indicate a pool bug — make it loud.
            if self.closed:
                raise RuntimeError

            self.closed = True

    @pytest.fixture
    def instances(self):
        # Collects every instance the pool fixture ever produces.
        return set()

    @pytest.fixture(params=[50, 40, 30, 20, 10])
    def max_size(self, request):
        return request.param

    @pytest.fixture
    def pool(self, max_size, instances, event_loop):
        async def create_instance():
            instance = TestInstanceBase.Instance()
            instances.add(instance)
            return instance

        return Pool(create_instance, max_size=max_size, loop=event_loop)
67 |
68 |
class TestInstance(TestInstanceBase):
    """Pool closing behaviour: explicit close() and context-manager exit."""

    @staticmethod
    async def _hold_briefly(pool):
        # Acquire an instance and hold it long enough to force the pool to
        # spin up additional instances for concurrent acquirers.
        async with pool.acquire():
            await asyncio.sleep(0.05)

    async def test_close(self, pool, instances, event_loop, max_size):
        """Explicit ``pool.close()`` closes every created instance."""
        assert not pool.is_closed
        assert len(instances) == 0

        await asyncio.gather(*(self._hold_briefly(pool) for _ in range(200)))

        assert len(instances) == max_size
        assert all(not instance.closed for instance in instances)

        await pool.close()

        assert all(instance.closed for instance in instances)
        assert pool.is_closed

    async def test_close_context_manager(self, pool, instances):
        """Leaving ``async with pool`` closes the pool and its instances."""
        async with pool:
            assert not pool.is_closed

            assert len(instances) == 0

            await asyncio.gather(
                *(self._hold_briefly(pool) for _ in range(200)),
            )

            assert len(instances) > 1
            assert all(not instance.closed for instance in instances)

            # Still open while inside the context manager.
            assert not pool.is_closed

        assert pool.is_closed

        assert all(instance.closed for instance in instances)
115 |
116 |
class TestCaseNoMaxSize(TestInstance):
    """Inherited closing tests plus a throughput check over 200 acquisitions."""

    async def test_simple(self, pool, event_loop):
        """Every concurrent acquisition completes and yields a pool-created
        Instance."""
        call_count = 200
        completed = 0

        async def getter():
            nonlocal completed

            async with pool.acquire() as instance:
                await asyncio.sleep(1)
                assert isinstance(instance, TestInstanceBase.Instance)
                completed += 1
                return completed

        results = await asyncio.gather(*(getter() for _ in range(call_count)))

        assert all(result > -1 for result in results)

        assert completed == call_count
137 |
138 |
class TestCaseItemReuse(TestInstanceBase):
    """Acquisitions must be spread evenly across the pooled instances."""

    @pytest.fixture
    def call_count(self, max_size):
        # Enough acquisitions for each pooled instance to serve 5 of them.
        return max_size * 5

    async def test_simple(self, pool, call_count, instances):
        """Each instance serves exactly the same number of acquisitions."""
        usage: Counter = Counter()

        async def getter():
            async with pool.acquire() as instance:
                await asyncio.sleep(0.05)
                usage[instance] += 1

        await asyncio.gather(*(getter() for _ in range(call_count)))

        assert sum(usage.values()) == call_count
        assert set(usage) == set(instances)
        # Even distribution: all usage counts are identical.
        assert len(set(usage.values())) == 1
159 |
--------------------------------------------------------------------------------
/tests/test_tools.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | from copy import copy
4 | from typing import Any, List
5 | from unittest import mock
6 |
7 | import pytest
8 |
9 | from aio_pika.tools import CallbackCollection, ensure_awaitable
10 |
11 |
12 | log = logging.getLogger(__name__)
13 |
14 |
# noinspection PyTypeChecker
class TestCase:
    """Behavioral tests for :class:`CallbackCollection`."""

    @pytest.fixture
    def instance(self) -> mock.MagicMock:
        # The "sender" the collection is bound to; it is passed as the
        # first argument to every registered callback.
        return mock.MagicMock()

    @pytest.fixture
    def collection(self, instance):
        return CallbackCollection(instance)

    def test_basic(self, collection):
        """add/remove/freeze/unfreeze/clear contract of the collection."""
        def func(sender, *args, **kwargs):
            pass

        collection.add(func)

        assert func in collection

        # Only callables may be registered.
        with pytest.raises(ValueError):
            collection.add(None)

        collection.remove(func)

        # Removing a callback that is not registered raises.
        with pytest.raises(LookupError):
            collection.remove(func)

        # Duplicates collapse: re-adding the same callback keeps one entry.
        for _ in range(10):
            collection.add(func)

        assert len(collection) == 1

        collection.freeze()

        # Freezing an already-frozen collection is an error.
        with pytest.raises(RuntimeError):
            collection.freeze()

        assert len(collection) == 1

        # A frozen collection rejects every mutation.
        with pytest.raises(RuntimeError):
            collection.add(func)

        with pytest.raises(RuntimeError):
            collection.remove(func)

        with pytest.raises(RuntimeError):
            collection.clear()

        collection2 = copy(collection)
        collection.unfreeze()

        # A copy taken after unfreezing is itself unfrozen...
        assert not copy(collection).is_frozen

        # ...while the copy taken while frozen keeps its frozen state.
        assert collection.is_frozen != collection2.is_frozen

        # Unfreezing an already-unfrozen collection is an error.
        with pytest.raises(RuntimeError):
            collection.unfreeze()

        # Clearing affects only this collection, not its earlier copy.
        collection.clear()
        assert collection2
        assert not collection

    def test_callback_call(self, collection):
        """Calling the collection synchronously invokes every callback,
        preserving call order."""
        l1: List[Any] = list()
        l2: List[Any] = list()

        assert l1 == l2

        collection.add(lambda sender, x: l1.append(x))
        collection.add(lambda sender, x: l2.append(x))

        collection(1)
        collection(2)

        assert l1 == l2
        assert l1 == [1, 2]

    async def test_blank_awaitable_callback(self, collection):
        # Awaiting an empty collection is a harmless no-op.
        await collection()

    async def test_awaitable_callback(
        self, event_loop, collection, instance,
    ):
        """Plain callables, coroutine functions and task factories are all
        supported; each receives the bound sender as its argument."""
        future = event_loop.create_future()

        shared = []

        async def coro(arg):
            nonlocal shared
            shared.append(arg)

        def task_maker(arg):
            return event_loop.create_task(coro(arg))

        collection.add(future.set_result)
        collection.add(coro)
        collection.add(task_maker)

        await collection()

        # ``coro`` ran twice: once directly, once via ``task_maker``.
        assert shared == [instance, instance]
        assert await future == instance

    async def test_collection_create_tasks(
        self, event_loop, collection, instance,
    ):
        """Calling (not awaiting) the collection still schedules coroutine
        callbacks as tasks on the loop."""
        future = event_loop.create_future()

        async def coro(arg):
            await asyncio.sleep(0.5)
            future.set_result(arg)

        collection.add(coro)

        # noinspection PyAsyncCall
        collection()

        assert await future == instance

    async def test_collection_run_tasks_parallel(self, collection):
        """Callbacks run concurrently: 100 one-second callbacks must all
        finish within a two-second timeout."""
        class Callable:
            def __init__(self):
                self.counter = 0

            async def __call__(self, *args, **kwargs):
                await asyncio.sleep(1)
                self.counter += 1

        callables = [Callable() for _ in range(100)]

        for callable in callables:
            collection.add(callable)

        await asyncio.wait_for(collection(), timeout=2)

        assert [c.counter for c in callables] == [1] * 100
150 |
151 |
class TestEnsureAwaitable:
    """Deprecation-warning behaviour of ``ensure_awaitable`` wrapping."""

    async def test_non_coroutine(self):
        # Wrapping a plain function warns at wrap time and again per call.
        with pytest.deprecated_call(match="You probably registering the"):
            wrapped = ensure_awaitable(lambda x: x * x)

        with pytest.deprecated_call(match="Function"):
            assert await wrapped(2) == 4

        with pytest.deprecated_call(match="Function"):
            assert await wrapped(4) == 16

    async def test_coroutine(self):
        # A genuine coroutine function is wrapped without any warning.
        async def square(x):
            return x * x

        wrapped = ensure_awaitable(square)
        assert await wrapped(2) == 4
        assert await wrapped(4) == 16

    async def test_something_awaitable_returned(self):
        # A plain function that *returns* an awaitable: warns only at
        # wrap time, not when called.
        def plain_func(x):
            async def inner(x):
                return x * x

            return inner(x)

        with pytest.deprecated_call(match="You probably registering the"):
            wrapped = ensure_awaitable(plain_func)

        assert await wrapped(2) == 4

    async def test_something_non_awaitable_returned(self):
        # A plain function returning a plain value: warns at wrap time
        # and again on every call.
        def plain_func(x):
            def inner(x):
                return x * x

            return inner(x)

        with pytest.deprecated_call(match="You probably registering the"):
            wrapped = ensure_awaitable(plain_func)

        with pytest.deprecated_call(match="Function"):
            assert await wrapped(2) == 4
196 |
--------------------------------------------------------------------------------
/tests/test_types.py:
--------------------------------------------------------------------------------
1 | import aio_pika
2 | import aio_pika.abc
3 |
4 |
async def test_connect_robust(amqp_url) -> None:
    """connect_robust() must yield robust connection and channel types,
    which are also instances of the plain abstract types."""
    async with await aio_pika.connect_robust(amqp_url) as connection:
        for expected_type in (
            aio_pika.abc.AbstractRobustConnection,
            aio_pika.abc.AbstractConnection,
        ):
            assert isinstance(connection, expected_type)

        channel = await connection.channel()
        for expected_type in (
            aio_pika.abc.AbstractRobustChannel,
            aio_pika.abc.AbstractChannel,
        ):
            assert isinstance(channel, expected_type)
13 |
14 |
async def test_connect(amqp_url) -> None:
    """connect() must yield the plain (non-robust) connection and channel
    types — explicitly *not* the robust variants."""
    async with await aio_pika.connect(amqp_url) as connection:
        assert isinstance(connection, aio_pika.abc.AbstractConnection)
        assert not isinstance(
            connection, aio_pika.abc.AbstractRobustConnection,
        )

        channel = await connection.channel()
        assert isinstance(channel, aio_pika.abc.AbstractChannel)
        assert not isinstance(
            channel, aio_pika.abc.AbstractRobustChannel,
        )
27 |
--------------------------------------------------------------------------------