├── .gitattributes ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── RELEASING ├── TODO ├── asynqp.sublime-project ├── doc ├── Makefile ├── _static │ └── custom.css ├── _templates │ └── layout.html ├── conf.py ├── conformance.rst ├── examples.rst ├── examples │ ├── helloworld.py │ └── reconnecting.py ├── extensions.rst ├── index.rst ├── make.bat └── reference.rst ├── ez_setup.py ├── requirements.txt ├── setup.py ├── src └── asynqp │ ├── __init__.py │ ├── _exceptions.py │ ├── amqp0-9-1.xml │ ├── amqptypes.py │ ├── channel.py │ ├── connection.py │ ├── exceptions.py │ ├── exchange.py │ ├── frames.py │ ├── log.py │ ├── message.py │ ├── protocol.py │ ├── queue.py │ ├── routing.py │ ├── serialisation.py │ └── spec.py └── test ├── __init__.py ├── base_contexts.py ├── channel_tests.py ├── connection_tests.py ├── exchange_tests.py ├── heartbeat_tests.py ├── integration_tests.py ├── message_tests.py ├── method_tests.py ├── protocol_tests.py ├── queue_tests.py ├── serialisation_tests.py └── util.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 
40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | 45 | # Rope 46 | .ropeproject 47 | 48 | # Django stuff: 49 | *.log 50 | *.pot 51 | 52 | # Sphinx documentation 53 | doc/_build/ 54 | 55 | *.sublime-workspace 56 | .ipynb_checkpoints 57 | .DS_Store 58 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | 3 | language: python 4 | 5 | python: 6 | - 3.4 7 | - 3.5 8 | - 3.6 9 | 10 | services: rabbitmq 11 | 12 | install: 13 | - pip install -r requirements.txt 14 | - python setup.py develop 15 | 16 | script: 17 | - flake8 src test --ignore=E501,W503,E722 18 | - coverage run --source=src -m contexts -v 19 | - pushd doc && make html && popd 20 | 21 | after_success: 22 | coveralls 23 | 24 | deploy: 25 | provider: pypi 26 | user: benjamin.hodgson 27 | password: 28 | secure: "bJA9NXYhqDgKiMX71YzU3Bq39NgfEi7R7cUoORs73/11ofJzvrFUsnmvw+n90FHru3b1AwZgOxHnlEOPZpQhgKol6BCBgv0HUscjA00dttXs7TO53/c8gqUIkhwlyEbn6+5++kEBTrrVZWjHrTfK6dWKbFA4LspTewvt4p8I9rQ=" 29 | on: 30 | tags: true 31 | all_branches: true 32 | distributions: "sdist bdist_wheel bdist_egg" 33 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | What's new in `asynqp` 2 | ====================== 3 | 4 | 5 | v0.5 6 | ---- 7 | * Channels will no longer break if their calls are cancelled. Issue #52. 8 | * Fixed message properties decoding without `content_type`. Pull request #66. 9 | * Added `nowait` argument to Exchange and Queue declarations. 10 | * Added `passive` argument to Exchange and Queue declarations. 11 | * Added `on_error` and `on_cancel` callbacks for Consumer. Issue #34 12 | * Changed the closing scheme for Channel/Connection. Proper exceptions are now 13 | always propagated to user. 
Issues #57, #58 14 | * Complete internals refactor and cleanup. Pull requests #48, #49, #50. 15 | * Add NO_DELAY option for socket. Issue #40. (Thanks to @socketpair for PR #41) 16 | * Change heartbeat to be a proper background task. Issue #45. 17 | * `loop` is now properly passed to all components from open_connection call. Pull request #42. 18 | * Add support for Python up to 3.5. 19 | 20 | v0.4 21 | ---- 22 | 23 | * Improved error handling. 24 | * When the connection to the server is lost, any futures awaiting communication 25 | from the server will now be cancelled. 26 | (Thanks to @lenzenmi, in pull request #19) 27 | * More detailed exceptions on channel closure. 28 | * Support for custom RabbitMQ extensions by an `arguments` keyword parameter for a number of methods. 29 | (Thanks to @fweisel, in pull request #27) 30 | * Improved compatibility with RabbitMQ's implementation of the 31 | wire protocol, including better support for tables. 32 | (Thanks to @fweisel, in pull requests #24, #25, #26, #28) 33 | * Support for a `sock` keyword argument to `asynqp.connect`. 34 | (Thanks to @urbaniak, in pull request #14) 35 | 36 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | To contribute to this project, submit a [pull request](https://help.github.com/articles/using-pull-requests): 2 | 3 | 1. [Fork this repo](https://help.github.com/articles/fork-a-repo) 4 | 2. [Create a branch](https://help.github.com/articles/creating-and-deleting-branches-within-your-repository#creating-a-branch) in your fork of the repo and commit your changes 5 | 3. [Open a pull request](https://help.github.com/articles/creating-a-pull-request) in this repo to merge your topic branch into the mainstream 6 | 4. 
I'll review your changes and merge your pull request as soon as possible 7 | 8 | If you want to contribute to the project, but are not sure what you want to work on, 9 | I am always happy for help on any of the [open issues](https://github.com/benjamin-hodgson/asynqp/issues) 10 | in the GitHub tracker. 11 | 12 | This project is built using Test-Driven-Development. 13 | So if you're planning to contribute a feature or bugfix, please **ensure that 14 | it is covered by tests** before submitting it for review. Use your best judgment to 15 | determine what kind of tests are appropriate - a good rule of thumb is 16 | *unit tests for everything* and *integration tests for important features*. 17 | 18 | The tests are written using [Contexts](https://github.com/benjamin-hodgson/Contexts) 19 | so you'll need to install that into your virtualenv before running the tests. 20 | You also need a local instance of RabbitMQ running. So to run all the tests: 21 | 22 | ```bash 23 | pip install contexts 24 | sudo rabbitmq-server 25 | run-contexts # in another window 26 | ``` 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013-2015 Benjamin Hodgson 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include ez_setup.py 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | asynqp 2 | ====== 3 | 4 | [![Build Status](https://travis-ci.org/benjamin-hodgson/asynqp.svg?branch=master)](https://travis-ci.org/benjamin-hodgson/asynqp) 5 | [![Documentation Status](https://readthedocs.org/projects/asynqp/badge/?version=v0.4)](https://readthedocs.org/projects/asynqp/?badge=v0.4) 6 | [![Coverage Status](https://coveralls.io/repos/benjamin-hodgson/asynqp/badge.svg?branch=master&service=github)](https://coveralls.io/github/benjamin-hodgson/asynqp?branch=master) 7 | [![Requirements Status](https://requires.io/github/benjamin-hodgson/asynqp/requirements.svg?branch=master)](https://requires.io/github/benjamin-hodgson/asynqp/requirements/?branch=master) 8 | 9 | `asynqp` is an AMQP (aka [RabbitMQ](rabbitmq.com)) client library for 10 | Python 3.4's new [`asyncio`](https://docs.python.org/3.4/library/asyncio.html) module. 11 | 12 | Check out the [official documentation](http://asynqp.readthedocs.org/). 
13 | 14 | 15 | Example 16 | ------- 17 | 18 | ```python 19 | import asyncio 20 | import asynqp 21 | 22 | 23 | @asyncio.coroutine 24 | def hello_world(): 25 | """ 26 | Sends a 'hello world' message and then reads it from the queue. 27 | """ 28 | # connect to the RabbitMQ broker 29 | connection = yield from asynqp.connect('localhost', 5672, username='guest', password='guest') 30 | 31 | # Open a communications channel 32 | channel = yield from connection.open_channel() 33 | 34 | # Create a queue and an exchange on the broker 35 | exchange = yield from channel.declare_exchange('test.exchange', 'direct') 36 | queue = yield from channel.declare_queue('test.queue') 37 | 38 | # Bind the queue to the exchange, so the queue will get messages published to the exchange 39 | yield from queue.bind(exchange, 'routing.key') 40 | 41 | # If you pass in a dict it will be automatically converted to JSON 42 | msg = asynqp.Message({'hello': 'world'}) 43 | exchange.publish(msg, 'routing.key') 44 | 45 | # Synchronously get a message from the queue 46 | received_message = yield from queue.get() 47 | print(received_message.json()) # get JSON from incoming messages easily 48 | 49 | # Acknowledge a delivered message 50 | received_message.ack() 51 | 52 | yield from channel.close() 53 | yield from connection.close() 54 | 55 | 56 | if __name__ == "__main__": 57 | loop = asyncio.get_event_loop() 58 | loop.run_until_complete(hello_world()) 59 | ``` 60 | 61 | 62 | Installation 63 | ------------ 64 | 65 | `asynqp` is [on the Cheese Shop](https://pypi.python.org/pypi/asynqp), so you can install it using Pip: 66 | ``` 67 | pip install asynqp 68 | ``` 69 | 70 | If you want the latest development version, you can install it from source: 71 | ``` 72 | git clone https://github.com/benjamin-hodgson/asynqp.git 73 | cd asynqp 74 | python setup.py install 75 | ``` 76 | -------------------------------------------------------------------------------- /RELEASING: 
-------------------------------------------------------------------------------- 1 | 1. Check Travis is green on the latest commit 2 | 2. Increment the version in setup.py 3 | 3. Increment the version in doc/conf.py 4 | 4. Commit and tag 5 | 5. Push using --follow-tags 6 | 6. Check that the tests passed on Travis and it published to the Cheese Shop 7 | 6a. If it failed to publish to the cheese shop for some reason, run `python setup.py register` followed by `python setup.py sdist bdist_egg bdist_wheel upload` 8 | 7. Build the docs at readthedocs.org and increment the latest version 9 | -------------------------------------------------------------------------------- /TODO: -------------------------------------------------------------------------------- 1 | API design issues 2 | 3 | Public implementation-related fields in public classes (even though they're not documented) 4 | Threading issues? 5 | Load testing? 6 | 7 | 8 | 9 | Unimplemented wire-protocol elements 10 | 11 | Arbitrary tables 12 | 13 | 14 | 15 | Unimplemented methods: 16 | 17 | Connection 18 | secure/secure-ok - pluggable auth mechanism? 
19 | 20 | Channel 21 | flow/flow-ok 22 | 23 | Exchange 24 | bind/bind-ok (Rabbit extension) 25 | unbind/unbind-ok (Rabbit extension) 26 | 27 | Basic 28 | recover/recover-ok 29 | nack (Rabbit extension) 30 | 31 | Tx 32 | select/select-ok 33 | commit/commit-ok 34 | rollback/rollback-ok 35 | 36 | Confirm 37 | select/select-ok 38 | 39 | 40 | 41 | Unimplemented functions 42 | 43 | Customise connection-tune response 44 | Passive declares 45 | No-wait flags 46 | -------------------------------------------------------------------------------- /asynqp.sublime-project: -------------------------------------------------------------------------------- 1 | { 2 | "build_systems": 3 | [ 4 | { 5 | "name": "asynqp Contexts tests", 6 | "windows":{ 7 | "cmd": ["$project_path\\env\\scripts\\run-contexts"] 8 | }, 9 | "cmd": "$project_path/env/bin/run-contexts", 10 | "working_dir": "$project_path" 11 | } 12 | ], 13 | "folders": 14 | [ 15 | { 16 | "follow_symlinks": true, 17 | "path": "." 18 | } 19 | ] 20 | } 21 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build 
finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/asynqp.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/asynqp.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/asynqp" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/asynqp" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 
152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /doc/_static/custom.css: -------------------------------------------------------------------------------- 1 | span.green { 2 | color: #006600; 3 | font-weight: bold; 4 | } 5 | span.red { 6 | color: #FF0000; 7 | font-weight: bold; 8 | } 9 | span.orange { 10 | color: #FF9933; 11 | font-weight: bold; 12 | } 13 | -------------------------------------------------------------------------------- /doc/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | {% block extrahead %} 3 | 5 | {% endblock %} 6 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # asynqp documentation build configuration file, created by 5 | # sphinx-quickstart on Wed Jun 11 20:44:10 2014. 
6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | 19 | # If extensions (or modules to document with autodoc) are in another directory, 20 | # add these directories to sys.path here. If the directory is relative to the 21 | # documentation root, use os.path.abspath to make it absolute, like shown here. 22 | sys.path.insert(0, os.path.abspath(os.path.join(__file__, '../../src'))) 23 | 24 | 25 | # -- General configuration ------------------------------------------------ 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 28 | #needs_sphinx = '1.0' 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] 34 | intersphinx_mapping = {'python': ('http://docs.python.org/3', None)} 35 | 36 | autodoc_member_order = 'bysource' 37 | 38 | add_function_parentheses = False 39 | 40 | # Add any paths that contain templates here, relative to this directory. 41 | templates_path = ['_templates'] 42 | 43 | # The suffix of source filenames. 44 | source_suffix = '.rst' 45 | 46 | # The encoding of source files. 47 | #source_encoding = 'utf-8-sig' 48 | 49 | # The master toctree document. 50 | master_doc = 'index' 51 | 52 | # General information about the project. 53 | project = 'asynqp' 54 | copyright = '2014-2015, Benjamin Hodgson' 55 | 56 | # The version info for the project you're documenting, acts as replacement for 57 | # |version| and |release|, also used in various other places throughout the 58 | # built documents. 59 | # 60 | # The short X.Y version. 
61 | version = '0.6' 62 | # The full version, including alpha/beta/rc tags. 63 | release = '0.6' 64 | 65 | 66 | def hide_class_constructor(app, what, name, obj, options, signature, return_annotation): 67 | if what == "class" and not name.endswith(".Message"): 68 | return (None, None) 69 | return (signature, return_annotation) 70 | 71 | 72 | 73 | def setup(app): 74 | app.connect('autodoc-process-signature', hide_class_constructor) 75 | 76 | 77 | # The language for content autogenerated by Sphinx. Refer to documentation 78 | # for a list of supported languages. 79 | #language = None 80 | 81 | # There are two options for replacing |today|: either, you set today to some 82 | # non-false value, then it is used: 83 | #today = '' 84 | # Else, today_fmt is used as the format for a strftime call. 85 | #today_fmt = '%B %d, %Y' 86 | 87 | # List of patterns, relative to source directory, that match files and 88 | # directories to ignore when looking for source files. 89 | exclude_patterns = ['_build'] 90 | 91 | # The reST default role (used for this markup: `text`) to use for all 92 | # documents. 93 | #default_role = None 94 | 95 | # If true, '()' will be appended to :func: etc. cross-reference text. 96 | #add_function_parentheses = True 97 | 98 | # If true, the current module name will be prepended to all description 99 | # unit titles (such as .. function::). 100 | #add_module_names = True 101 | 102 | # If true, sectionauthor and moduleauthor directives will be shown in the 103 | # output. They are ignored by default. 104 | #show_authors = False 105 | 106 | # The name of the Pygments (syntax highlighting) style to use. 107 | pygments_style = 'sphinx' 108 | 109 | # A list of ignored prefixes for module index sorting. 110 | #modindex_common_prefix = [] 111 | 112 | # If true, keep warnings as "system message" paragraphs in the built documents. 
113 | #keep_warnings = False 114 | 115 | 116 | # -- Options for HTML output ---------------------------------------------- 117 | 118 | # The theme to use for HTML and HTML Help pages. See the documentation for 119 | # a list of builtin themes. 120 | html_theme = 'nature' 121 | 122 | # Theme options are theme-specific and customize the look and feel of a theme 123 | # further. For a list of options available for each theme, see the 124 | # documentation. 125 | #html_theme_options = {} 126 | 127 | # Add any paths that contain custom themes here, relative to this directory. 128 | #html_theme_path = [] 129 | 130 | # The name for this set of Sphinx documents. If None, it defaults to 131 | # " v documentation". 132 | #html_title = None 133 | 134 | # A shorter title for the navigation bar. Default is the same as html_title. 135 | #html_short_title = None 136 | 137 | # The name of an image file (relative to this directory) to place at the top 138 | # of the sidebar. 139 | #html_logo = None 140 | 141 | # The name of an image file (within the static path) to use as favicon of the 142 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 143 | # pixels large. 144 | #html_favicon = None 145 | 146 | # Add any paths that contain custom static files (such as style sheets) here, 147 | # relative to this directory. They are copied after the builtin static files, 148 | # so a file named "default.css" will overwrite the builtin "default.css". 149 | html_static_path = ['_static'] 150 | 151 | # Add any extra paths that contain custom files (such as robots.txt or 152 | # .htaccess) here, relative to this directory. These files are copied 153 | # directly to the root of the documentation. 154 | #html_extra_path = [] 155 | 156 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 157 | # using the given strftime format. 
158 | #html_last_updated_fmt = '%b %d, %Y' 159 | 160 | # If true, SmartyPants will be used to convert quotes and dashes to 161 | # typographically correct entities. 162 | #html_use_smartypants = True 163 | 164 | # Custom sidebar templates, maps document names to template names. 165 | #html_sidebars = {} 166 | 167 | # Additional templates that should be rendered to pages, maps page names to 168 | # template names. 169 | #html_additional_pages = {} 170 | 171 | # If false, no module index is generated. 172 | #html_domain_indices = True 173 | 174 | # If false, no index is generated. 175 | #html_use_index = True 176 | 177 | # If true, the index is split into individual pages for each letter. 178 | #html_split_index = False 179 | 180 | # If true, links to the reST sources are added to the pages. 181 | #html_show_sourcelink = True 182 | 183 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 184 | #html_show_sphinx = True 185 | 186 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 187 | #html_show_copyright = True 188 | 189 | # If true, an OpenSearch description file will be output, and all pages will 190 | # contain a tag referring to it. The value of this option must be the 191 | # base URL from which the finished HTML is served. 192 | #html_use_opensearch = '' 193 | 194 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 195 | #html_file_suffix = None 196 | 197 | # Output file base name for HTML help builder. 198 | htmlhelp_basename = 'asynqpdoc' 199 | 200 | 201 | # -- Options for LaTeX output --------------------------------------------- 202 | 203 | latex_elements = { 204 | # The paper size ('letterpaper' or 'a4paper'). 205 | #'papersize': 'letterpaper', 206 | 207 | # The font size ('10pt', '11pt' or '12pt'). 208 | #'pointsize': '10pt', 209 | 210 | # Additional stuff for the LaTeX preamble. 211 | #'preamble': '', 212 | } 213 | 214 | # Grouping the document tree into LaTeX files. 
List of tuples 215 | # (source start file, target name, title, 216 | # author, documentclass [howto, manual, or own class]). 217 | latex_documents = [ 218 | ('index', 'asynqp.tex', 'asynqp Documentation', 219 | 'Benjamin Hodgson', 'manual'), 220 | ] 221 | 222 | # The name of an image file (relative to this directory) to place at the top of 223 | # the title page. 224 | #latex_logo = None 225 | 226 | # For "manual" documents, if this is true, then toplevel headings are parts, 227 | # not chapters. 228 | #latex_use_parts = False 229 | 230 | # If true, show page references after internal links. 231 | #latex_show_pagerefs = False 232 | 233 | # If true, show URL addresses after external links. 234 | #latex_show_urls = False 235 | 236 | # Documents to append as an appendix to all manuals. 237 | #latex_appendices = [] 238 | 239 | # If false, no module index is generated. 240 | #latex_domain_indices = True 241 | 242 | 243 | # -- Options for manual page output --------------------------------------- 244 | 245 | # One entry per manual page. List of tuples 246 | # (source start file, name, description, authors, manual section). 247 | man_pages = [ 248 | ('index', 'asynqp', 'asynqp Documentation', 249 | ['Benjamin Hodgson'], 1) 250 | ] 251 | 252 | # If true, show URL addresses after external links. 253 | #man_show_urls = False 254 | 255 | 256 | # -- Options for Texinfo output ------------------------------------------- 257 | 258 | # Grouping the document tree into Texinfo files. List of tuples 259 | # (source start file, target name, title, author, 260 | # dir menu entry, description, category) 261 | texinfo_documents = [ 262 | ('index', 'asynqp', 'asynqp Documentation', 263 | 'Benjamin Hodgson', 'asynqp', 'One line description of project.', 264 | 'Miscellaneous'), 265 | ] 266 | 267 | # Documents to append as an appendix to all manuals. 268 | #texinfo_appendices = [] 269 | 270 | # If false, no module index is generated. 
271 | #texinfo_domain_indices = True 272 | 273 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 274 | #texinfo_show_urls = 'footnote' 275 | 276 | # If true, do not generate a @detailmenu in the "Top" node's menu. 277 | #texinfo_no_detailmenu = False 278 | -------------------------------------------------------------------------------- /doc/conformance.rst: -------------------------------------------------------------------------------- 1 | .. asynqp documentation master file, created by 2 | sphinx-quickstart on Wed Jun 11 20:44:10 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | .. role :: red 7 | 8 | .. role :: orange 9 | 10 | .. role :: green 11 | 12 | 13 | AMQP Procotol Support 14 | ===================== 15 | ``asynqp`` is under development. 16 | Here is a table documenting the parts of the `AMQP protocol `_ 17 | that are currently supported by ``asynqp``. 18 | 19 | .. NOTE:: 20 | This library is alpha software. Even the methods marked as 'full support' may still have bugs. 21 | Please report any bugs to the `Github tracker `_. 
:meth:`asynqp.Connection.open_channel`
| 42 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 43 | | | flow/flow-ok | :red:`none` | | | 44 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 45 | | | close/close-ok | :green:`full` | :meth:`asynqp.Channel.close` | | 46 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 47 | | exchange | | :orange:`partial` | :class:`asynqp.Exchange` | | 48 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 49 | | | declare/declare-ok | :green:`full` | :meth:`asynqp.Channel.declare_exchange` | | 50 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 51 | | | delete/delete-ok | :green:`full` | :meth:`asynqp.Exchange.delete` | | 52 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 53 | | | bind/bind-ok | :red:`none` | | RabbitMQ extension | 54 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 55 | | | unbind/unbind-ok | :red:`none` | | RabbitMQ extension | 56 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 57 | | queue | | :orange:`partial` | :class:`asynqp.Queue` | | 58 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 59 | | | declare/declare-ok | :green:`full` | :meth:`asynqp.Channel.declare_queue` | | 60 | 
+------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 61 | | | bind/bind-ok | :orange:`partial` | :meth:`asynqp.Queue.bind` | Not all parameters presently supported | 62 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 63 | | | unbind/unbind-ok | :green:`full` | :meth:`asynqp.QueueBinding.unbind` | | 64 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 65 | | | purge/purge-ok | :orange:`partial` | :meth:`asynqp.Queue.purge` | ``no-wait`` not presently supported | 66 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 67 | | | delete/delete-ok | :orange:`partial` | :meth:`asynqp.Queue.delete` | ``no-wait`` not presently supported | 68 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 69 | | basic | | :orange:`partial` | | | 70 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 71 | | | qos/qos-ok | :green:`full` | :meth:`asynqp.Channel.set_qos` | | 72 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 73 | | | consume/consume-ok | :orange:`partial` | :meth:`asynqp.Queue.consume` | Not all parameters presently supported | 74 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 75 | | | cancel/cancel-ok | :orange:`partial` | :meth:`asynqp.Consumer.cancel` | ``no-wait`` not presently supported | 76 | 
+------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 77 | | | publish | :orange:`partial` | :meth:`asynqp.Exchange.publish` | ``immediate`` not presently supported | 78 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 79 | | | return | :green:`full` | :meth:`asynqp.Channel.set_return_handler` | | 80 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 81 | | | deliver | :green:`full` | | | 82 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 83 | | | get/get-ok/get-empty | :green:`full` | :meth:`asynqp.Queue.get` | | 84 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 85 | | | ack | :green:`full` | :meth:`asynqp.IncomingMessage.ack` | | 86 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 87 | | | reject | :green:`full` | :meth:`asynqp.IncomingMessage.reject` | | 88 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 89 | | | recover/recover-ok | :red:`none` | | | 90 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 91 | | | recover-async | :red:`none` | | | 92 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 93 | | | nack | :red:`none` | | RabbitMQ extension | 94 | 
+------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 95 | | tx | | :red:`none` | | | 96 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 97 | | | select/select-ok | :red:`none` | | | 98 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 99 | | | commit/commit-ok | :red:`none` | | | 100 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 101 | | | rollback/rollback-ok | :red:`none` | | | 102 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 103 | | confirm | | :red:`none` | | | 104 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 105 | | | select/select-ok | :red:`none` | | | 106 | +------------+----------------------+-------------------+-------------------------------------------+-----------------------------------------+ 107 | -------------------------------------------------------------------------------- /doc/examples.rst: -------------------------------------------------------------------------------- 1 | Examples 2 | ======== 3 | 4 | 5 | Hello World 6 | ----------- 7 | .. literalinclude:: /examples/helloworld.py 8 | :language: python 9 | 10 | 11 | Reconnecting 12 | ------------ 13 | .. 
async def hello_world():
    """
    Sends a 'hello world' message and then reads it from the queue.

    Demonstrates the full round trip: connect, open a channel, declare an
    exchange and a queue, bind them, publish, consume, and acknowledge.
    """
    # NOTE(review): this example previously used ``@asyncio.coroutine`` and
    # ``yield from``, which were removed in Python 3.11; native coroutines
    # behave identically under ``run_until_complete``.

    # connect to the RabbitMQ broker
    connection = await asynqp.connect('localhost', 5672, username='guest', password='guest')

    # Open a communications channel
    channel = await connection.open_channel()

    # Create a queue and an exchange on the broker
    exchange = await channel.declare_exchange('test.exchange', 'direct')
    queue = await channel.declare_queue('test.queue')

    # Bind the queue to the exchange, so the queue will get messages published to the exchange
    await queue.bind(exchange, 'routing.key')

    # If you pass in a dict it will be automatically converted to JSON
    msg = asynqp.Message({'hello': 'world'})
    exchange.publish(msg, 'routing.key')

    # Synchronously get a message from the queue
    received_message = await queue.get()
    print(received_message.json())  # get JSON from incoming messages easily

    # Acknowledge a delivered message
    received_message.ack()

    await channel.close()
    await connection.close()


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(hello_world())
class Consumer:
    """Message callback that forwards delivered messages to an asyncio.Queue.

    asynqp calls the instance for every delivered message, and calls
    ``on_error`` if the connection is lost while consuming.
    """

    def __init__(self, connection, queue):
        self.queue = queue
        self.connection = connection

    def __call__(self, msg):
        # Invoked by asynqp for each delivered message; hand it to the
        # processing task via the queue without blocking.
        self.queue.put_nowait(msg)

    def on_error(self, exc):
        # Invoked by asynqp when the connection dies mid-consume.
        print("Connection lost while consuming queue", exc)


async def connect_and_consume(queue):
    """Connect to the broker and start consuming 'test.queue'.

    Returns the open connection on success, or None if consuming could
    not be set up (the connection is closed in that case).

    NOTE(review): converted from ``@asyncio.coroutine``/``yield from``
    (removed in Python 3.11) to a native coroutine; behavior is unchanged.
    """
    # connect to the RabbitMQ broker
    connection = await asynqp.connect(
        'localhost', 5672, username='guest', password='guest')
    try:
        channel = await connection.open_channel()
        amqp_queue = await channel.declare_queue('test.queue')
        consumer = Consumer(connection, queue)
        await amqp_queue.consume(consumer)
    except asynqp.AMQPError as err:
        print("Could not consume on queue", err)
        await connection.close()
        return None
    return connection


async def reconnector(queue):
    """Keep a broker connection alive, reconnecting with a fixed backoff."""
    try:
        connection = None
        while True:
            if connection is None or connection.is_closed():
                print("Connecting to rabbitmq...")
                try:
                    connection = await connect_and_consume(queue)
                except (ConnectionError, OSError):
                    print("Failed to connect to rabbitmq server. "
                          "Will retry in {} seconds".format(RECONNECT_BACKOFF))
                    connection = None
                if connection is None:
                    await asyncio.sleep(RECONNECT_BACKOFF)
                else:
                    print("Successfully connected and consuming test.queue")
            # poll connection state every 100ms
            await asyncio.sleep(0.1)
    except asyncio.CancelledError:
        # Task cancelled (e.g. at shutdown): close the connection cleanly.
        if connection is not None:
            await connection.close()


async def process_msgs(queue):
    """Drain the queue forever, acknowledging each message."""
    try:
        while True:
            msg = await queue.get()
            print("Received", msg.body)
            msg.ack()
    except asyncio.CancelledError:
        pass


def main():
    loop = asyncio.get_event_loop()
    queue = asyncio.Queue()
    # Start main indexing task in the background
    reconnect_task = loop.create_task(reconnector(queue))
    process_task = loop.create_task(process_msgs(queue))
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        # Cancel both tasks and let them finish their cleanup paths.
        process_task.cancel()
        reconnect_task.cancel()
        loop.run_until_complete(process_task)
        loop.run_until_complete(reconnect_task)
        loop.close()


if __name__ == "__main__":
    main()
12 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | ``asynqp`` 2 | ========== 3 | An AMQP (aka `RabbitMQ `_) client library for :mod:`asyncio`. 4 | 5 | 6 | Example 7 | ------- 8 | .. literalinclude:: /examples/helloworld.py 9 | :language: python 10 | 11 | 12 | Installation 13 | ------------ 14 | 15 | :mod:`asynqp` has no dependencies outside of the standard library. To install the package: 16 | 17 | :: 18 | 19 | pip install asynqp 20 | 21 | 22 | Table of contents 23 | ----------------- 24 | .. toctree:: 25 | :maxdepth: 2 26 | 27 | reference 28 | examples 29 | conformance 30 | extensions 31 | 32 | * :ref:`genindex` 33 | * :ref:`modindex` 34 | * :ref:`search` 35 | 36 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. 
epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 
84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\asynqp.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\asynqp.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 
145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 
214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /doc/reference.rst: -------------------------------------------------------------------------------- 1 | Reference guide 2 | =============== 3 | 4 | .. module:: asynqp 5 | 6 | Connecting to the AMQP broker 7 | ----------------------------- 8 | 9 | .. autofunction:: connect 10 | 11 | .. autofunction:: connect_and_open_channel 12 | 13 | 14 | Managing Connections and Channels 15 | --------------------------------- 16 | 17 | Connections 18 | ~~~~~~~~~~~ 19 | 20 | .. autoclass:: Connection 21 | :members: 22 | 23 | 24 | Channels 25 | ~~~~~~~~ 26 | 27 | .. autoclass:: Channel 28 | :members: 29 | 30 | 31 | Sending and receiving messages with Queues and Exchanges 32 | -------------------------------------------------------- 33 | 34 | Queues 35 | ~~~~~~ 36 | 37 | .. autoclass:: Queue 38 | :members: 39 | 40 | 41 | Exchanges 42 | ~~~~~~~~~ 43 | 44 | .. autoclass:: Exchange 45 | :members: 46 | 47 | 48 | Bindings 49 | ~~~~~~~~ 50 | 51 | .. autoclass:: QueueBinding 52 | :members: 53 | 54 | Consumers 55 | ~~~~~~~~~ 56 | 57 | .. 
autoclass:: Consumer 58 | :members: 59 | 60 | 61 | Message objects 62 | --------------- 63 | 64 | .. autoclass:: Message 65 | :members: 66 | 67 | .. autoclass:: IncomingMessage 68 | :members: 69 | 70 | 71 | Exceptions 72 | ---------- 73 | 74 | .. automodule:: asynqp.exceptions 75 | :members: 76 | :undoc-members: 77 | -------------------------------------------------------------------------------- /ez_setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | Setuptools bootstrapping installer. 5 | 6 | Run this script to install or upgrade setuptools. 7 | """ 8 | 9 | import os 10 | import shutil 11 | import sys 12 | import tempfile 13 | import zipfile 14 | import optparse 15 | import subprocess 16 | import platform 17 | import textwrap 18 | import contextlib 19 | import warnings 20 | 21 | from distutils import log 22 | 23 | try: 24 | from urllib.request import urlopen 25 | except ImportError: 26 | from urllib2 import urlopen 27 | 28 | try: 29 | from site import USER_SITE 30 | except ImportError: 31 | USER_SITE = None 32 | 33 | DEFAULT_VERSION = "15.2" 34 | DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" 35 | DEFAULT_SAVE_DIR = os.curdir 36 | 37 | 38 | def _python_cmd(*args): 39 | """ 40 | Execute a command. 41 | 42 | Return True if the command succeeded. 
43 | """ 44 | args = (sys.executable,) + args 45 | return subprocess.call(args) == 0 46 | 47 | 48 | def _install(archive_filename, install_args=()): 49 | """Install Setuptools.""" 50 | with archive_context(archive_filename): 51 | # installing 52 | log.warn('Installing Setuptools') 53 | if not _python_cmd('setup.py', 'install', *install_args): 54 | log.warn('Something went wrong during the installation.') 55 | log.warn('See the error message above.') 56 | # exitcode will be 2 57 | return 2 58 | 59 | 60 | def _build_egg(egg, archive_filename, to_dir): 61 | """Build Setuptools egg.""" 62 | with archive_context(archive_filename): 63 | # building an egg 64 | log.warn('Building a Setuptools egg in %s', to_dir) 65 | _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) 66 | # returning the result 67 | log.warn(egg) 68 | if not os.path.exists(egg): 69 | raise IOError('Could not build the egg.') 70 | 71 | 72 | class ContextualZipFile(zipfile.ZipFile): 73 | 74 | """Supplement ZipFile class to support context manager for Python 2.6.""" 75 | 76 | def __enter__(self): 77 | return self 78 | 79 | def __exit__(self, type, value, traceback): 80 | self.close() 81 | 82 | def __new__(cls, *args, **kwargs): 83 | """Construct a ZipFile or ContextualZipFile as appropriate.""" 84 | if hasattr(zipfile.ZipFile, '__exit__'): 85 | return zipfile.ZipFile(*args, **kwargs) 86 | return super(ContextualZipFile, cls).__new__(cls) 87 | 88 | 89 | @contextlib.contextmanager 90 | def archive_context(filename): 91 | """ 92 | Unzip filename to a temporary directory, set to the cwd. 93 | 94 | The unzipped target is cleaned up after. 
def _do_download(version, download_base, to_dir, download_delay):
    """Download Setuptools and make its egg importable from *to_dir*."""
    egg_name = 'setuptools-%s-py%d.%d.egg' % (
        version, sys.version_info[0], sys.version_info[1])
    egg = os.path.join(to_dir, egg_name)
    if not os.path.exists(egg):
        archive = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, archive, to_dir)
    sys.path.insert(0, egg)

    # Remove previously-imported pkg_resources if present, so the egg's
    # copy takes precedence (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        del sys.modules['pkg_resources']

    import setuptools
    setuptools.bootstrap_install_from = egg
147 | rep_modules = 'pkg_resources', 'setuptools' 148 | imported = set(sys.modules).intersection(rep_modules) 149 | 150 | try: 151 | import pkg_resources 152 | pkg_resources.require("setuptools>=" + version) 153 | # a suitable version is already installed 154 | return 155 | except ImportError: 156 | # pkg_resources not available; setuptools is not installed; download 157 | pass 158 | except pkg_resources.DistributionNotFound: 159 | # no version of setuptools was found; allow download 160 | pass 161 | except pkg_resources.VersionConflict as VC_err: 162 | if imported: 163 | _conflict_bail(VC_err, version) 164 | 165 | # otherwise, unload pkg_resources to allow the downloaded version to 166 | # take precedence. 167 | del pkg_resources 168 | _unload_pkg_resources() 169 | 170 | return _do_download(version, download_base, to_dir, download_delay) 171 | 172 | 173 | def _conflict_bail(VC_err, version): 174 | """ 175 | Setuptools was imported prior to invocation, so it is 176 | unsafe to unload it. Bail out. 177 | """ 178 | conflict_tmpl = textwrap.dedent(""" 179 | The required version of setuptools (>={version}) is not available, 180 | and can't be installed while this script is running. Please 181 | install a more recent version first, using 182 | 'easy_install -U setuptools'. 183 | 184 | (Currently using {VC_err.args[0]!r}) 185 | """) 186 | msg = conflict_tmpl.format(**locals()) 187 | sys.stderr.write(msg) 188 | sys.exit(2) 189 | 190 | 191 | def _unload_pkg_resources(): 192 | del_modules = [ 193 | name for name in sys.modules 194 | if name.startswith('pkg_resources') 195 | ] 196 | for mod_name in del_modules: 197 | del sys.modules[mod_name] 198 | 199 | 200 | def _clean_check(cmd, target): 201 | """ 202 | Run the command to download target. 203 | 204 | If the command fails, clean up before re-raising the error. 
def download_file_powershell(url, target):
    """
    Download the file at url to target using Powershell.

    Powershell will validate trust.
    Raise an exception if the command cannot complete.
    """
    target = os.path.abspath(target)
    ps_cmd = (
        "[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
        "[System.Net.CredentialCache]::DefaultCredentials; "
        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
        % vars()
    )
    _clean_check(['powershell', '-Command', ps_cmd], target)


def has_powershell():
    """Determine if Powershell is available."""
    if platform.system() != 'Windows':
        return False
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(['powershell', '-Command', 'echo test'],
                                  stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True
download_file_powershell.viable = has_powershell


def download_file_curl(url, target):
    # Silent download straight to the target path.
    _clean_check(['curl', url, '--silent', '--output', target], target)


def has_curl():
    # Probe for a working curl binary; any failure means "not viable".
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(['curl', '--version'],
                                  stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True
download_file_curl.viable = has_curl


def download_file_wget(url, target):
    # Quiet download straight to the target path.
    _clean_check(['wget', url, '--quiet', '--output-document', target], target)
def download_file_insecure(url, target):
    """Use Python to download the file, without connection authentication."""
    response = urlopen(url)
    try:
        # Fetch the entire payload at once.
        payload = response.read()
    finally:
        response.close()

    # A single write avoids leaving a partially-written file behind.
    with open(target, "wb") as outfile:
        outfile.write(payload)
download_file_insecure.viable = lambda: True
323 | """ 324 | # making sure we use the absolute path 325 | to_dir = os.path.abspath(to_dir) 326 | zip_name = "setuptools-%s.zip" % version 327 | url = download_base + zip_name 328 | saveto = os.path.join(to_dir, zip_name) 329 | if not os.path.exists(saveto): # Avoid repeated downloads 330 | log.warn("Downloading %s", url) 331 | downloader = downloader_factory() 332 | downloader(url, saveto) 333 | return os.path.realpath(saveto) 334 | 335 | 336 | def _build_install_args(options): 337 | """ 338 | Build the arguments to 'python setup.py install' on the setuptools package. 339 | 340 | Returns list of command line arguments. 341 | """ 342 | return ['--user'] if options.user_install else [] 343 | 344 | 345 | def _parse_args(): 346 | """Parse the command line for options.""" 347 | parser = optparse.OptionParser() 348 | parser.add_option( 349 | '--user', dest='user_install', action='store_true', default=False, 350 | help='install in user site package (requires Python 2.6 or later)') 351 | parser.add_option( 352 | '--download-base', dest='download_base', metavar="URL", 353 | default=DEFAULT_URL, 354 | help='alternative URL from where to download the setuptools package') 355 | parser.add_option( 356 | '--insecure', dest='downloader_factory', action='store_const', 357 | const=lambda: download_file_insecure, default=get_best_downloader, 358 | help='Use internal, non-validating downloader' 359 | ) 360 | parser.add_option( 361 | '--version', help="Specify which version to download", 362 | default=DEFAULT_VERSION, 363 | ) 364 | parser.add_option( 365 | '--to-dir', 366 | help="Directory to save (and re-use) package", 367 | default=DEFAULT_SAVE_DIR, 368 | ) 369 | options, args = parser.parse_args() 370 | # positional arguments are ignored 371 | return options 372 | 373 | 374 | def _download_args(options): 375 | """Return args for download_setuptools function from cmdline args.""" 376 | return dict( 377 | version=options.version, 378 | download_base=options.download_base, 379 | 
def main():
    """Install or upgrade setuptools and EasyInstall."""
    cli_options = _parse_args()
    saved_archive = download_setuptools(**_download_args(cli_options))
    return _install(saved_archive, _build_install_args(cli_options))
| "Topic :: Software Development :: Libraries :: Python Modules", 35 | "Topic :: System :: Networking" 36 | ] 37 | ) 38 | -------------------------------------------------------------------------------- /src/asynqp/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | import socket 4 | import asyncio 5 | from .exceptions import * # noqa 6 | from .message import Message, IncomingMessage 7 | from .connection import Connection 8 | from .channel import Channel 9 | from .exchange import Exchange 10 | from .queue import Queue, QueueBinding, Consumer 11 | 12 | 13 | __all__ = [ 14 | "Message", "IncomingMessage", 15 | "Connection", "Channel", "Exchange", "Queue", "QueueBinding", "Consumer", 16 | "connect", "connect_and_open_channel" 17 | ] 18 | __all__ += exceptions.__all__ 19 | 20 | 21 | @asyncio.coroutine 22 | def connect(host='localhost', 23 | port=5672, 24 | username='guest', password='guest', 25 | virtual_host='/', 26 | on_connection_close=None, *, 27 | loop=None, sock=None, **kwargs): 28 | """ 29 | Connect to an AMQP server on the given host and port. 30 | 31 | Log in to the given virtual host using the supplied credentials. 32 | This function is a :ref:`coroutine `. 33 | 34 | :param str host: the host server to connect to. 35 | :param int port: the port which the AMQP server is listening on. 36 | :param str username: the username to authenticate with. 37 | :param str password: the password to authenticate with. 38 | :param str virtual_host: the AMQP virtual host to connect to. 39 | :param func on_connection_close: function called after connection lost. 40 | :keyword BaseEventLoop loop: An instance of :class:`~asyncio.BaseEventLoop` to use. 41 | (Defaults to :func:`asyncio.get_event_loop()`) 42 | :keyword socket sock: A :func:`~socket.socket` instance to use for the connection. 43 | This is passed on to :meth:`loop.create_connection() `. 
44 | If ``sock`` is supplied then ``host`` and ``port`` will be ignored. 45 | 46 | Further keyword arguments are passed on to :meth:`loop.create_connection() `. 47 | 48 | This function will set TCP_NODELAY on TCP and TCP6 sockets either on supplied ``sock`` or created one. 49 | 50 | :return: the :class:`Connection` object. 51 | """ 52 | from .protocol import AMQP 53 | from .routing import Dispatcher 54 | from .connection import open_connection 55 | 56 | loop = asyncio.get_event_loop() if loop is None else loop 57 | 58 | if sock is None: 59 | kwargs['host'] = host 60 | kwargs['port'] = port 61 | else: 62 | kwargs['sock'] = sock 63 | 64 | dispatcher = Dispatcher() 65 | 66 | def protocol_factory(): 67 | return AMQP(dispatcher, loop, close_callback=on_connection_close) 68 | transport, protocol = yield from loop.create_connection(protocol_factory, **kwargs) 69 | 70 | # RPC-like applications require TCP_NODELAY in order to acheive 71 | # minimal response time. Actually, this library send data in one 72 | # big chunk and so this will not affect TCP-performance. 73 | sk = transport.get_extra_info('socket') 74 | # 1. Unfortunatelly we cannot check socket type (sk.type == socket.SOCK_STREAM). https://bugs.python.org/issue21327 75 | # 2. 
Proto remains zero, if not specified at creation of socket 76 | if (sk.family in (socket.AF_INET, socket.AF_INET6)) and (sk.proto in (0, socket.IPPROTO_TCP)): 77 | sk.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) 78 | 79 | connection_info = { 80 | 'username': username, 81 | 'password': password, 82 | 'virtual_host': virtual_host 83 | } 84 | connection = yield from open_connection( 85 | loop, transport, protocol, dispatcher, connection_info) 86 | return connection 87 | 88 | 89 | @asyncio.coroutine 90 | def connect_and_open_channel(host='localhost', 91 | port=5672, 92 | username='guest', password='guest', 93 | virtual_host='/', 94 | on_connection_close=None, *, 95 | loop=None, **kwargs): 96 | """ 97 | Connect to an AMQP server and open a channel on the connection. 98 | This function is a :ref:`coroutine `. 99 | 100 | Parameters of this function are the same as :func:`connect`. 101 | 102 | :return: a tuple of ``(connection, channel)``. 103 | 104 | Equivalent to:: 105 | 106 | connection = yield from connect(host, port, username, password, virtual_host, on_connection_close, loop=loop, **kwargs) 107 | channel = yield from connection.open_channel() 108 | return connection, channel 109 | """ 110 | connection = yield from connect(host, port, username, password, virtual_host, on_connection_close, loop=loop, **kwargs) 111 | channel = yield from connection.open_channel() 112 | return connection, channel 113 | -------------------------------------------------------------------------------- /src/asynqp/_exceptions.py: -------------------------------------------------------------------------------- 1 | class AMQPError(IOError): 2 | pass 3 | 4 | 5 | class AMQPChannelError(AMQPError): 6 | pass 7 | -------------------------------------------------------------------------------- /src/asynqp/amqptypes.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from . 
class Bit(object):
    """Wrapper around a boolean, representing the AMQP 'bit' type."""

    def __init__(self, value):
        if isinstance(value, type(self)):
            value = value.value  # unwrap another Bit passed to the constructor
        if isinstance(value, bool):
            self.value = value
        else:
            raise TypeError('Could not construct a Bit from value {}'.format(value))

    def __eq__(self, other):
        # Plain bools compare directly against the wrapped value.
        if isinstance(other, type(self.value)):
            return self.value == other
        # Otherwise compare wrapped value to wrapped value, deferring to the
        # other operand when it carries no .value attribute.
        if hasattr(other, 'value'):
            return self.value == other.value
        return NotImplemented

    def __bool__(self):
        return self.value

    @classmethod
    def read(cls, stream):
        """Deserialise a Bit from a binary stream."""
        return cls(serialisation.read_bool(stream))
class Long(int):
    """A signed 32-bit integer, as defined by the AMQP 'long' type."""

    MIN = -(1 << 31)
    MAX = (1 << 31) - 1

    def __new__(cls, value):
        # Reject anything outside the signed 32-bit range.
        if Long.MIN <= value <= Long.MAX:
            return super().__new__(cls, value)
        raise TypeError('Could not construct a Long from value {}'.format(value))

    def write(self, stream):
        """Serialise this value to a binary stream."""
        stream.write(serialisation.pack_long(self))

    @classmethod
    def read(cls, stream):
        """Deserialise a Long from a binary stream."""
        return cls(serialisation.read_long(stream))
class Table(dict):
    """An AMQP field table: a dict which knows how to (de)serialise itself."""

    @classmethod
    def read(cls, stream):
        """Deserialise a Table from a binary stream."""
        return cls(serialisation.read_table(stream))

    def write(self, stream):
        """Serialise this table to a binary stream."""
        stream.write(serialisation.pack_table(self))
# Maps the field-type names used by the AMQP XML specification to the Python
# classes defined above, so spec-driven code can look up a type by name.
FIELD_TYPES = {
    'bit': Bit,
    'octet': Octet,
    'short': Short,
    'unsignedshort': UnsignedShort,
    'long': Long,
    'unsignedlong': UnsignedLong,
    'longlong': LongLong,
    'unsignedlonglong': UnsignedLongLong,
    'table': Table,
    'longstr': LongStr,
    'shortstr': ShortStr,
    'timestamp': Timestamp
}
attribute:: protocol 31 | 32 | The :class:`~asyncio.Protocol` which is paired with the transport 33 | """ 34 | def __init__(self, loop, transport, protocol, synchroniser, sender, dispatcher, connection_info): 35 | self.synchroniser = synchroniser 36 | self.sender = sender 37 | self.channel_factory = ChannelFactory(loop, protocol, dispatcher, connection_info) 38 | self.connection_info = connection_info 39 | 40 | self.transport = transport 41 | self.protocol = protocol 42 | # Indicates, that close was initiated by client 43 | self.closed = asyncio.Future(loop=loop) 44 | # This future is a backport, so we don't need to log pending errors 45 | self.closed.add_done_callback(lambda fut: fut.exception()) 46 | 47 | self._closing = False 48 | 49 | @asyncio.coroutine 50 | def open_channel(self): 51 | """ 52 | Open a new channel on this connection. 53 | 54 | This method is a :ref:`coroutine `. 55 | 56 | :return: The new :class:`Channel` object. 57 | """ 58 | if self._closing: 59 | raise ConnectionClosed("Closed by application") 60 | if self.closed.done(): 61 | raise self.closed.exception() 62 | 63 | channel = yield from self.channel_factory.open() 64 | return channel 65 | 66 | def is_closed(self): 67 | " Returns True if connection was closed " 68 | return self._closing or self.closed.done() 69 | 70 | @asyncio.coroutine 71 | def close(self): 72 | """ 73 | Close the connection by handshaking with the server. 74 | 75 | This method is a :ref:`coroutine `. 76 | """ 77 | if not self.is_closed(): 78 | self._closing = True 79 | # Let the ConnectionActor do the actual close operations. 80 | # It will do the work on CloseOK 81 | self.sender.send_Close( 82 | 0, 'Connection closed by application', 0, 0) 83 | try: 84 | yield from self.synchroniser.wait(spec.ConnectionCloseOK) 85 | except AMQPConnectionError: 86 | # For example if both sides want to close or the connection 87 | # is closed. 
88 | pass 89 | else: 90 | if self._closing: 91 | log.warn("Called `close` on already closing connection...") 92 | # finish all pending tasks 93 | yield from self.protocol.heartbeat_monitor.wait_closed() 94 | 95 | 96 | @asyncio.coroutine 97 | def open_connection(loop, transport, protocol, dispatcher, connection_info): 98 | synchroniser = routing.Synchroniser(loop=loop) 99 | 100 | sender = ConnectionMethodSender(protocol) 101 | connection = Connection(loop, transport, protocol, synchroniser, sender, dispatcher, connection_info) 102 | actor = ConnectionActor(synchroniser, sender, protocol, connection, dispatcher, loop=loop) 103 | reader = routing.QueuedReader(actor, loop=loop) 104 | 105 | try: 106 | dispatcher.add_handler(0, reader.feed) 107 | protocol.send_protocol_header() 108 | reader.ready() 109 | 110 | yield from synchroniser.wait(spec.ConnectionStart) 111 | sender.send_StartOK( 112 | {"product": "asynqp", 113 | "version": "0.1", # todo: use pkg_resources to inspect the package 114 | "platform": sys.version, 115 | "capabilities": { 116 | "consumer_cancel_notify": True 117 | }}, 118 | 'AMQPLAIN', 119 | {'LOGIN': connection_info['username'], 'PASSWORD': connection_info['password']}, 120 | 'en_US' 121 | ) 122 | reader.ready() 123 | 124 | frame = yield from synchroniser.wait(spec.ConnectionTune) 125 | # just agree with whatever the server wants. 
    def handle(self, frame):
        """
        Dispatch an incoming frame, honouring the AMQP close rule.

        Once the connection is closing/closed, only ConnectionClose,
        ConnectionCloseOK and the internal PoisonPillFrame are processed;
        every other frame is dropped silently.
        """
        # From docs on `close`:
        # After sending this method, any received methods except Close and
        # Close-OK MUST be discarded.
        # So we will only process ConnectionClose, ConnectionCloseOK,
        # PoisonPillFrame if channel is closed
        if self.connection.is_closed():
            close_methods = (spec.ConnectionClose, spec.ConnectionCloseOK)
            if isinstance(frame.payload, close_methods) or isinstance(frame, frames.PoisonPillFrame):
                return super().handle(frame)
            else:
                return
        return super().handle(frame)
    def handle_ConnectionCloseOK(self, frame):
        """ The server acknowledged our Close; tear everything down now. """
        self.synchroniser.notify(spec.ConnectionCloseOK)
        exc = ConnectionClosed("Closed by application")
        self._close_all(exc)
        # We already agreed with the server on closing, so let's do it right away
        self.protocol.close()
def _get_exception_type(reply_code):
    """
    Map an AMQP reply code to its exception class.

    The constant name (e.g. 'not_found') is turned into the CamelCase
    class name (e.g. 'NotFound') used as the key in EXCEPTIONS.
    """
    constant_name = CONSTANTS_INVERSE[reply_code]
    classname = ''.join(part.capitalize() for part in constant_name.split('_'))
    return EXCEPTIONS[classname]
def read(frame_type, channel_id, raw_payload):
    """
    Parse a raw frame payload into the appropriate Frame subclass.

    Raises ValueError for a frame type that is not method, content header,
    content body or heartbeat.
    """
    if frame_type == MethodFrame.frame_type:
        return MethodFrame(channel_id, spec.read_method(raw_payload))
    if frame_type == ContentHeaderFrame.frame_type:
        header_payload = message.ContentHeaderPayload.read(raw_payload)
        return ContentHeaderFrame(channel_id, header_payload)
    if frame_type == ContentBodyFrame.frame_type:
        return ContentBodyFrame(channel_id, raw_payload)
    if frame_type == HeartbeatFrame.frame_type:
        return HeartbeatFrame()
    raise ValueError("Received an unexpected frame type: " + str(frame_type))
class HeartbeatFrame(Frame):
    # Heartbeats always travel on channel 0 and carry an empty payload, so
    # both are fixed as class attributes and the base-class constructor
    # (which takes channel_id and payload) is deliberately bypassed.
    frame_type = spec.FRAME_HEARTBEAT
    channel_id = 0
    payload = b''

    def __init__(self):
        pass
20 | :param dict headers: a dictionary of message headers 21 | :param str content_type: MIME content type 22 | (defaults to 'application/json' if :code:`body` is a :class:`dict`, 23 | or 'application/octet-stream' otherwise) 24 | :param str content_encoding: MIME encoding (defaults to 'utf-8') 25 | :param int delivery_mode: 1 for non-persistent, 2 for persistent 26 | :param int priority: message priority - integer between 0 and 9 27 | :param str correlation_id: correlation id of the message *(for applications)* 28 | :param str reply_to: reply-to address *(for applications)* 29 | :param str expiration: expiration specification *(for applications)* 30 | :param str message_id: unique id of the message *(for applications)* 31 | :param datetime.datetime timestamp: :class:`~datetime.datetime` of when the message was sent 32 | (default: :meth:`datetime.now() `) 33 | :param str type: message type *(for applications)* 34 | :param str user_id: ID of the user sending the message *(for applications)* 35 | :param str app_id: ID of the application sending the message *(for applications)* 36 | 37 | Attributes are the same as the constructor parameters. 
38 | """ 39 | property_types = OrderedDict( 40 | [("content_type", amqptypes.ShortStr), 41 | ("content_encoding", amqptypes.ShortStr), 42 | ("headers", amqptypes.Table), 43 | ("delivery_mode", amqptypes.Octet), 44 | ("priority", amqptypes.Octet), 45 | ("correlation_id", amqptypes.ShortStr), 46 | ("reply_to", amqptypes.ShortStr), 47 | ("expiration", amqptypes.ShortStr), 48 | ("message_id", amqptypes.ShortStr), 49 | ("timestamp", amqptypes.Timestamp), 50 | ("type", amqptypes.ShortStr), 51 | ("user_id", amqptypes.ShortStr), 52 | ("app_id", amqptypes.ShortStr)] 53 | ) 54 | 55 | def __init__(self, body, *, 56 | headers=None, content_type=None, 57 | content_encoding=None, delivery_mode=None, 58 | priority=None, correlation_id=None, 59 | reply_to=None, expiration=None, 60 | message_id=None, timestamp=None, 61 | type=None, user_id=None, 62 | app_id=None): 63 | if content_encoding is None: 64 | content_encoding = 'utf-8' 65 | 66 | if isinstance(body, dict): 67 | body = json.dumps(body) 68 | if content_type is None: 69 | content_type = 'application/json' 70 | elif content_type is None: 71 | content_type = 'application/octet-stream' 72 | 73 | if isinstance(body, bytes): 74 | self.body = body 75 | else: 76 | self.body = body.encode(content_encoding) 77 | 78 | timestamp = timestamp if timestamp is not None else datetime.now() 79 | 80 | self._properties = OrderedDict() 81 | for name, amqptype in self.property_types.items(): 82 | value = locals()[name] 83 | if value is not None: 84 | value = amqptype(value) 85 | self._properties[name] = value 86 | 87 | def __eq__(self, other): 88 | return (self.body == other.body 89 | and self._properties == other._properties) 90 | 91 | def __getattr__(self, name): 92 | try: 93 | return self._properties[name] 94 | except KeyError as e: 95 | raise AttributeError from e 96 | 97 | def __setattr__(self, name, value): 98 | amqptype = self.property_types.get(name) 99 | if amqptype is not None: 100 | self._properties[name] = value if isinstance(value, 
amqptype) else amqptype(value) 101 | return 102 | super().__setattr__(name, value) 103 | 104 | def json(self): 105 | """ 106 | Parse the message body as JSON. 107 | 108 | :return: the parsed JSON. 109 | """ 110 | return json.loads(self.body.decode(self.content_encoding)) 111 | 112 | 113 | class IncomingMessage(Message): 114 | """ 115 | A message that has been delivered to the client. 116 | 117 | Subclass of :class:`Message`. 118 | 119 | .. attribute::delivery_tag 120 | 121 | The *delivery tag* assigned to this message by the AMQP broker. 122 | 123 | .. attribute::exchange_name 124 | 125 | The name of the exchange to which the message was originally published. 126 | 127 | .. attribute::routing_key 128 | 129 | The routing key under which the message was originally published. 130 | """ 131 | def __init__(self, *args, sender, delivery_tag, exchange_name, routing_key, **kwargs): 132 | super().__init__(*args, **kwargs) 133 | self.sender = sender 134 | self.delivery_tag = delivery_tag 135 | self.exchange_name = exchange_name 136 | self.routing_key = routing_key 137 | 138 | def ack(self): 139 | """ 140 | Acknowledge the message. 141 | """ 142 | self.sender.send_BasicAck(self.delivery_tag) 143 | 144 | def reject(self, *, requeue=True): 145 | """ 146 | Reject the message. 147 | 148 | :keyword bool requeue: if true, the broker will attempt to requeue the 149 | message and deliver it to an alternate consumer. 
150 | """ 151 | self.sender.send_BasicReject(self.delivery_tag, requeue) 152 | 153 | 154 | def get_header_payload(message, class_id): 155 | return ContentHeaderPayload(class_id, len(message.body), list(message._properties.values())) 156 | 157 | 158 | # NB: the total frame size will be 8 bytes larger than frame_body_size 159 | def get_frame_payloads(message, frame_body_size): 160 | frames = [] 161 | remaining = message.body 162 | while remaining: 163 | frame = remaining[:frame_body_size] 164 | remaining = remaining[frame_body_size:] 165 | frames.append(frame) 166 | return frames 167 | 168 | 169 | class ContentHeaderPayload(object): 170 | synchronous = True 171 | 172 | def __init__(self, class_id, body_length, properties): 173 | self.class_id = class_id 174 | self.body_length = body_length 175 | self.properties = properties 176 | 177 | def __eq__(self, other): 178 | return (self.class_id == other.class_id 179 | and self.body_length == other.body_length 180 | and self.properties == other.properties) 181 | 182 | def write(self, stream): 183 | stream.write(serialisation.pack_unsigned_short(self.class_id)) 184 | stream.write(serialisation.pack_unsigned_short(0)) # weight 185 | stream.write(serialisation.pack_unsigned_long_long(self.body_length)) 186 | 187 | bytesio = BytesIO() 188 | 189 | property_flags = 0 190 | bitshift = 15 191 | 192 | for val in self.properties: 193 | if val is not None: 194 | property_flags |= (1 << bitshift) 195 | val.write(bytesio) 196 | bitshift -= 1 197 | 198 | stream.write(serialisation.pack_unsigned_short(property_flags)) 199 | stream.write(bytesio.getvalue()) 200 | 201 | @classmethod 202 | def read(cls, raw): 203 | bytesio = BytesIO(raw) 204 | class_id = serialisation.read_unsigned_short(bytesio) 205 | weight = serialisation.read_unsigned_short(bytesio) 206 | assert weight == 0 207 | body_length = serialisation.read_unsigned_long_long(bytesio) 208 | property_flags_short = serialisation.read_unsigned_short(bytesio) 209 | 210 | properties = [] 
211 | 212 | for i, amqptype in enumerate(Message.property_types.values()): 213 | pos = 15 - i # We started from `content_type` witch has pos==15 214 | if property_flags_short & (1 << pos): 215 | properties.append(amqptype.read(bytesio)) 216 | else: 217 | properties.append(None) 218 | 219 | return cls(class_id, body_length, properties) 220 | 221 | def __repr__(self): 222 | return "".format( 223 | self.class_id, self.body_length, self.properties) 224 | 225 | 226 | class MessageBuilder(object): 227 | def __init__(self, sender, delivery_tag, redelivered, exchange_name, routing_key, consumer_tag=None): 228 | self.sender = sender 229 | self.delivery_tag = delivery_tag 230 | self.body = b'' 231 | self.consumer_tag = consumer_tag 232 | self.exchange_name = exchange_name 233 | self.routing_key = routing_key 234 | 235 | def set_header(self, header): 236 | self.body_length = header.body_length 237 | self.properties = {} 238 | for name, prop in zip(IncomingMessage.property_types, header.properties): 239 | self.properties[name] = prop 240 | 241 | def add_body_chunk(self, chunk): 242 | self.body += chunk 243 | 244 | def done(self): 245 | return len(self.body) == self.body_length 246 | 247 | def build(self): 248 | return IncomingMessage( 249 | self.body, 250 | sender=self.sender, 251 | delivery_tag=self.delivery_tag, 252 | exchange_name=self.exchange_name, 253 | routing_key=self.routing_key, 254 | **self.properties) 255 | -------------------------------------------------------------------------------- /src/asynqp/protocol.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import struct 3 | from contextlib import suppress 4 | from . import spec 5 | from . 
import frames 6 | from .exceptions import AMQPError, ConnectionLostError 7 | from .log import log 8 | 9 | 10 | class AMQP(asyncio.Protocol): 11 | def __init__(self, dispatcher, loop, close_callback=None): 12 | self.dispatcher = dispatcher 13 | self.partial_frame = b'' 14 | self.frame_reader = FrameReader() 15 | self.heartbeat_monitor = HeartbeatMonitor(self, loop) 16 | self._closed = False 17 | self._close_callback = close_callback 18 | 19 | def connection_made(self, transport): 20 | self.transport = transport 21 | 22 | def data_received(self, data): 23 | while data: 24 | self.heartbeat_monitor.heartbeat_received() # the spec says 'any octet may substitute for a heartbeat' 25 | 26 | try: 27 | result = self.frame_reader.read_frame(data) 28 | except AMQPError: 29 | self.close() 30 | raise 31 | 32 | if result is None: # incomplete frame, wait for the rest 33 | return 34 | frame, remainder = result 35 | 36 | self.dispatcher.dispatch(frame) 37 | data = remainder 38 | 39 | def send_method(self, channel, method): 40 | frame = frames.MethodFrame(channel, method) 41 | self.send_frame(frame) 42 | 43 | def send_frame(self, frame): 44 | self.transport.write(frame.serialise()) 45 | 46 | def send_protocol_header(self): 47 | self.transport.write(b'AMQP\x00\x00\x09\x01') 48 | 49 | def start_heartbeat(self, heartbeat_interval): 50 | self.heartbeat_monitor.start(heartbeat_interval) 51 | 52 | def connection_lost(self, exc): 53 | # If self._closed=True - we closed the transport ourselves. No need to 54 | # dispatch PoisonPillFrame, as we should have closed everything already 55 | if self._close_callback: 56 | # _close_callback now only accepts coroutines 57 | asyncio.ensure_future(self._close_callback(exc)) 58 | 59 | if not self._closed: 60 | poison_exc = ConnectionLostError( 61 | 'The connection was unexpectedly lost', exc) 62 | self.dispatcher.dispatch_all(frames.PoisonPillFrame(poison_exc)) 63 | # XXX: Really do we even need to raise this??? 
It's super bad API 64 | raise poison_exc from exc 65 | 66 | def heartbeat_timeout(self): 67 | """ Called by heartbeat_monitor on timeout """ 68 | assert not self._closed, "Did we not stop heartbeat_monitor on close?" 69 | log.error("Heartbeat time out") 70 | poison_exc = ConnectionLostError('Heartbeat timed out') 71 | poison_frame = frames.PoisonPillFrame(poison_exc) 72 | self.dispatcher.dispatch_all(poison_frame) 73 | # Spec says to just close socket without ConnectionClose handshake. 74 | self.close() 75 | 76 | def close(self): 77 | assert not self._closed, "Why do we close it 2-ce?" 78 | self._closed = True 79 | self.transport.close() 80 | 81 | 82 | class FrameReader(object): 83 | def __init__(self): 84 | self.partial_frame = b'' 85 | 86 | def read_frame(self, data): 87 | data = self.partial_frame + data 88 | self.partial_frame = b'' 89 | 90 | if len(data) < 7: 91 | self.partial_frame = data 92 | return 93 | 94 | frame_header = data[:7] 95 | frame_type, channel_id, size = struct.unpack('!BHL', frame_header) 96 | 97 | if len(data) < size + 8: 98 | self.partial_frame = data 99 | return 100 | 101 | raw_payload = data[7:7 + size] 102 | frame_end = data[7 + size] 103 | 104 | if frame_end != spec.FRAME_END: 105 | raise AMQPError("Frame end byte was incorrect") 106 | 107 | frame = frames.read(frame_type, channel_id, raw_payload) 108 | remainder = data[8 + size:] 109 | 110 | return frame, remainder 111 | 112 | 113 | class HeartbeatMonitor(object): 114 | def __init__(self, protocol, loop): 115 | self.protocol = protocol 116 | self.loop = loop 117 | self.send_hb_task = None 118 | self.monitor_task = None 119 | self._last_received = 0 120 | 121 | def start(self, interval): 122 | if interval <= 0: 123 | return 124 | self.send_hb_task = asyncio.ensure_future(self.send_heartbeat(interval), loop=self.loop) 125 | self.monitor_task = asyncio.ensure_future(self.monitor_heartbeat(interval), loop=self.loop) 126 | 127 | def stop(self): 128 | if self.send_hb_task is not None: 129 | 
self.send_hb_task.cancel() 130 | if self.monitor_task is not None: 131 | self.monitor_task.cancel() 132 | 133 | @asyncio.coroutine 134 | def wait_closed(self): 135 | if self.send_hb_task is not None: 136 | with suppress(asyncio.CancelledError): 137 | yield from self.send_hb_task 138 | if self.monitor_task is not None: 139 | with suppress(asyncio.CancelledError): 140 | yield from self.monitor_task 141 | 142 | @asyncio.coroutine 143 | def send_heartbeat(self, interval): 144 | # XXX: Add `last_sent` frame monitoring to not send heartbeats 145 | # if traffic was going through socket 146 | while True: 147 | self.protocol.send_frame(frames.HeartbeatFrame()) 148 | yield from asyncio.sleep(interval, loop=self.loop) 149 | 150 | @asyncio.coroutine 151 | def monitor_heartbeat(self, interval): 152 | self._last_received = self.loop.time() 153 | no_beat_for = 0 154 | while True: 155 | # As spec states: 156 | # If a peer detects no incoming traffic (i.e. received octets) for 157 | # two heartbeat intervals or longer, it should close the connection 158 | yield from asyncio.sleep( 159 | interval * 2 - no_beat_for, loop=self.loop) 160 | 161 | no_beat_for = self.loop.time() - self._last_received 162 | if no_beat_for > interval * 2: 163 | self.protocol.heartbeat_timeout() 164 | self.send_hb_task.cancel() 165 | return 166 | 167 | def heartbeat_received(self): 168 | self._last_received = self.loop.time() 169 | -------------------------------------------------------------------------------- /src/asynqp/routing.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import collections 3 | from . 
import frames 4 | from .log import log 5 | 6 | 7 | class Dispatcher(object): 8 | def __init__(self): 9 | self.handlers = {} 10 | 11 | def add_handler(self, channel_id, handler): 12 | self.handlers[channel_id] = handler 13 | 14 | def remove_handler(self, channel_id): 15 | del self.handlers[channel_id] 16 | 17 | def dispatch(self, frame): 18 | if isinstance(frame, frames.HeartbeatFrame): 19 | return 20 | handler = self.handlers[frame.channel_id] 21 | handler(frame) 22 | 23 | def dispatch_all(self, frame): 24 | for handler in self.handlers.values(): 25 | handler(frame) 26 | 27 | 28 | class Sender(object): 29 | def __init__(self, channel_id, protocol): 30 | self.channel_id = channel_id 31 | self.protocol = protocol 32 | 33 | def send_method(self, method): 34 | self.protocol.send_method(self.channel_id, method) 35 | 36 | 37 | class Actor(object): 38 | def __init__(self, synchroniser, sender, *, loop): 39 | self._loop = loop 40 | self.synchroniser = synchroniser 41 | self.sender = sender 42 | 43 | def handle(self, frame): 44 | try: 45 | meth = getattr(self, 'handle_' + type(frame).__name__) 46 | except AttributeError: 47 | meth = getattr(self, 'handle_' + type(frame.payload).__name__) 48 | 49 | meth(frame) 50 | 51 | 52 | class Synchroniser(object): 53 | 54 | def __init__(self, *, loop): 55 | self._loop = loop 56 | self._futures = collections.defaultdict(collections.deque) 57 | self.connection_exc = None 58 | 59 | def wait(self, *expected_methods): 60 | fut = asyncio.Future(loop=self._loop) 61 | 62 | if self.connection_exc is not None: 63 | fut.set_exception(self.connection_exc) 64 | return fut 65 | 66 | for method in expected_methods: 67 | self._futures[method].append(fut) 68 | return fut 69 | 70 | def notify(self, method, result=None): 71 | while True: 72 | try: 73 | fut = self._futures[method].popleft() 74 | except IndexError: 75 | # XXX: we can't just ignore this. 
76 | log.error("Got an unexpected method notification %s", method) 77 | return 78 | # We can have done futures if they were awaited together, like 79 | # (spec.BasicGetOK, spec.BasicGetEmpty). 80 | if not fut.done(): 81 | break 82 | 83 | fut.set_result(result) 84 | 85 | def killall(self, exc): 86 | """ Connection/Channel was closed. All subsequent and ongoing requests 87 | should raise an error 88 | """ 89 | self.connection_exc = exc 90 | # Set an exception for all others 91 | for method, futs in self._futures.items(): 92 | for fut in futs: 93 | if fut.done(): 94 | continue 95 | fut.set_exception(exc) 96 | self._futures.clear() 97 | 98 | 99 | # When ready() is called, wait for a frame to arrive on the queue. 100 | # When the frame does arrive, dispatch it to the handler and do nothing 101 | # until someone calls ready() again. 102 | class QueuedReader(object): 103 | def __init__(self, handler, *, loop): 104 | self.handler = handler 105 | self.is_waiting = False 106 | self.pending_frames = collections.deque() 107 | self._loop = loop 108 | 109 | def ready(self): 110 | assert not self.is_waiting, "ready() got called while waiting for a frame to be read" 111 | if self.pending_frames: 112 | frame = self.pending_frames.popleft() 113 | # We will call it in another tick just to be more strict about the 114 | # sequence of frames 115 | self._loop.call_soon(self.handler.handle, frame) 116 | else: 117 | self.is_waiting = True 118 | 119 | def feed(self, frame): 120 | if self.is_waiting: 121 | self.is_waiting = False 122 | # We will call it in another tick just to be more strict about the 123 | # sequence of frames 124 | self._loop.call_soon(self.handler.handle, frame) 125 | else: 126 | self.pending_frames.append(frame) 127 | -------------------------------------------------------------------------------- /src/asynqp/serialisation.py: -------------------------------------------------------------------------------- 1 | import struct 2 | from .exceptions import AMQPError 3 | from 
datetime import datetime, timezone 4 | 5 | 6 | def rethrow_as(expected_cls, to_throw): 7 | def decorator(f): 8 | def wrapper(*args, **kwargs): 9 | try: 10 | return f(*args, **kwargs) 11 | except expected_cls as e: 12 | raise to_throw from e 13 | return wrapper 14 | return decorator 15 | 16 | 17 | ########################################################### 18 | # Deserialisation 19 | ########################################################### 20 | 21 | 22 | @rethrow_as(struct.error, AMQPError('failed to read an octet')) 23 | def read_octet(stream): 24 | return _read_octet(stream)[0] 25 | 26 | 27 | @rethrow_as(struct.error, AMQPError('failed to read a short')) 28 | def read_short(stream): 29 | return _read_short(stream)[0] 30 | 31 | 32 | @rethrow_as(struct.error, AMQPError('failed to read an unsigned short')) 33 | def read_unsigned_short(stream): 34 | return _read_unsigned_short(stream)[0] 35 | 36 | 37 | @rethrow_as(struct.error, AMQPError('failed to read a long')) 38 | def read_long(stream): 39 | return _read_long(stream)[0] 40 | 41 | 42 | @rethrow_as(struct.error, AMQPError('failed to read an unsigned long')) 43 | def read_unsigned_long(stream): 44 | return _read_unsigned_long(stream)[0] 45 | 46 | 47 | @rethrow_as(struct.error, AMQPError('failed to read a long long')) 48 | def read_long_long(stream): 49 | return _read_long_long(stream)[0] 50 | 51 | 52 | @rethrow_as(struct.error, AMQPError('failed to read an unsigned long long')) 53 | def read_unsigned_long_long(stream): 54 | return _read_unsigned_long_long(stream)[0] 55 | 56 | 57 | @rethrow_as(struct.error, AMQPError('failed to read a short string')) 58 | def read_short_string(stream): 59 | return _read_short_string(stream)[0] 60 | 61 | 62 | @rethrow_as(struct.error, AMQPError('failed to read a long string')) 63 | def read_long_string(stream): 64 | return _read_long_string(stream)[0] 65 | 66 | 67 | @rethrow_as(KeyError, AMQPError('failed to read a table')) 68 | @rethrow_as(struct.error, AMQPError('failed to read a 
table')) 69 | def read_table(stream): 70 | return _read_table(stream)[0] 71 | 72 | 73 | @rethrow_as(KeyError, AMQPError('failed to read an array')) 74 | @rethrow_as(struct.error, AMQPError('failed to read an array')) 75 | def read_array(stream): 76 | return _read_array(stream)[0] 77 | 78 | 79 | @rethrow_as(struct.error, AMQPError('failed to read a boolean')) 80 | def read_bool(stream): 81 | return _read_bool(stream)[0] 82 | 83 | 84 | @rethrow_as(struct.error, AMQPError('failed to read a boolean')) 85 | def read_bools(byte, number_of_bools): 86 | bits = "{0:b}".format(byte) 87 | bits = "0" * (number_of_bools - len(bits)) + bits 88 | return (b == "1" for b in reversed(bits)) 89 | 90 | 91 | @rethrow_as(struct.error, AMQPError('failed to read a boolean')) 92 | def read_timestamp(stream): 93 | return _read_timestamp(stream)[0] 94 | 95 | 96 | def qpid_rabbit_mq_table(): 97 | # TODO: fix amqp 0.9.1 compatibility 98 | # TODO: Add missing types 99 | TABLE_VALUE_PARSERS = { 100 | b't': _read_bool, 101 | b'b': _read_signed_byte, 102 | b's': _read_short, 103 | b'I': _read_long, 104 | b'l': _read_long_long, 105 | b'f': _read_float, 106 | b'S': _read_long_string, 107 | b'A': _read_array, 108 | b'V': _read_void, 109 | b'x': _read_byte_array, 110 | b'F': _read_table, 111 | b'T': _read_timestamp 112 | } 113 | return TABLE_VALUE_PARSERS 114 | 115 | 116 | def _read_table(stream): 117 | TABLE_VALUE_PARSERS = qpid_rabbit_mq_table() 118 | table = {} 119 | 120 | table_length, initial_long_size = _read_unsigned_long(stream) 121 | consumed = initial_long_size 122 | 123 | while consumed < table_length + initial_long_size: 124 | key, x = _read_short_string(stream) 125 | consumed += x 126 | 127 | value_type_code = stream.read(1) 128 | consumed += 1 129 | 130 | value, x = TABLE_VALUE_PARSERS[value_type_code](stream) 131 | consumed += x 132 | 133 | table[key] = value 134 | 135 | return table, consumed 136 | 137 | 138 | def _read_short_string(stream): 139 | str_length, x = _read_octet(stream) 
140 | string = stream.read(str_length).decode('utf-8') 141 | return string, x + str_length 142 | 143 | 144 | def _read_long_string(stream): 145 | str_length, x = _read_unsigned_long(stream) 146 | buffer = stream.read(str_length) 147 | if len(buffer) != str_length: 148 | raise AMQPError("Long string had incorrect length") 149 | return buffer.decode('utf-8'), x + str_length 150 | 151 | 152 | def _read_octet(stream): 153 | x, = struct.unpack('!B', stream.read(1)) 154 | return x, 1 155 | 156 | 157 | def _read_signed_byte(stream): 158 | x, = struct.unpack_from('!b', stream.read(1)) 159 | return x, 1 160 | 161 | 162 | def _read_bool(stream): 163 | x, = struct.unpack('!?', stream.read(1)) 164 | return x, 1 165 | 166 | 167 | def _read_short(stream): 168 | x, = struct.unpack('!h', stream.read(2)) 169 | return x, 2 170 | 171 | 172 | def _read_unsigned_short(stream): 173 | x, = struct.unpack('!H', stream.read(2)) 174 | return x, 2 175 | 176 | 177 | def _read_long(stream): 178 | x, = struct.unpack('!l', stream.read(4)) 179 | return x, 4 180 | 181 | 182 | def _read_unsigned_long(stream): 183 | x, = struct.unpack('!L', stream.read(4)) 184 | return x, 4 185 | 186 | 187 | def _read_long_long(stream): 188 | x, = struct.unpack('!q', stream.read(8)) 189 | return x, 8 190 | 191 | 192 | def _read_unsigned_long_long(stream): 193 | x, = struct.unpack('!Q', stream.read(8)) 194 | return x, 8 195 | 196 | 197 | def _read_float(stream): 198 | x, = struct.unpack('!f', stream.read(4)) 199 | return x, 4 200 | 201 | 202 | def _read_timestamp(stream): 203 | x, = struct.unpack('!Q', stream.read(8)) 204 | # From datetime.fromutctimestamp converts it to a local timestamp without timezone information 205 | return datetime.fromtimestamp(x * 1e-3, timezone.utc), 8 206 | 207 | 208 | def _read_array(stream): 209 | TABLE_VALUE_PARSERS = qpid_rabbit_mq_table() 210 | field_array = [] 211 | 212 | # The standard says only long, but unsigned long seems sensible 213 | array_length, initial_long_size = 
_read_unsigned_long(stream) 214 | consumed = initial_long_size 215 | 216 | while consumed < array_length + initial_long_size: 217 | value_type_code = stream.read(1) 218 | consumed += 1 219 | value, x = TABLE_VALUE_PARSERS[value_type_code](stream) 220 | consumed += x 221 | field_array.append(value) 222 | 223 | return field_array, consumed 224 | 225 | 226 | def _read_void(stream): 227 | return None, 0 228 | 229 | 230 | def _read_byte_array(stream): 231 | byte_array_length, x = _read_unsigned_long(stream) 232 | return stream.read(byte_array_length), byte_array_length + x 233 | 234 | 235 | ########################################################### 236 | # Serialisation 237 | ########################################################### 238 | 239 | def pack_short_string(string): 240 | buffer = string.encode('utf-8') 241 | return pack_octet(len(buffer)) + buffer 242 | 243 | 244 | def pack_long_string(string): 245 | buffer = string.encode('utf-8') 246 | return pack_unsigned_long(len(buffer)) + buffer 247 | 248 | 249 | def pack_field_value(value): 250 | if value is None: 251 | return b'V' 252 | if isinstance(value, bool): 253 | return b't' + pack_bool(value) 254 | if isinstance(value, dict): 255 | return b'F' + pack_table(value) 256 | if isinstance(value, list): 257 | return b'A' + pack_array(value) 258 | if isinstance(value, bytes): 259 | return b'x' + pack_byte_array(value) 260 | if isinstance(value, str): 261 | return b'S' + pack_long_string(value) 262 | if isinstance(value, datetime): 263 | return b'T' + pack_timestamp(value) 264 | if isinstance(value, int): 265 | if value.bit_length() < 8: 266 | return b'b' + pack_signed_byte(value) 267 | if value.bit_length() < 32: 268 | return b'I' + pack_long(value) 269 | if isinstance(value, float): 270 | return b'f' + pack_float(value) 271 | raise NotImplementedError() 272 | 273 | 274 | def pack_table(d): 275 | buffer = b'' 276 | for key, value in d.items(): 277 | buffer += pack_short_string(key) 278 | # todo: more values 279 | 
buffer += pack_field_value(value) 280 | 281 | return pack_unsigned_long(len(buffer)) + buffer 282 | 283 | 284 | def pack_octet(number): 285 | return struct.pack('!B', number) 286 | 287 | 288 | def pack_signed_byte(number): 289 | return struct.pack('!b', number) 290 | 291 | 292 | def pack_unsigned_byte(number): 293 | return struct.pack('!B', number) 294 | 295 | 296 | def pack_short(number): 297 | return struct.pack('!h', number) 298 | 299 | 300 | def pack_unsigned_short(number): 301 | return struct.pack('!H', number) 302 | 303 | 304 | def pack_long(number): 305 | return struct.pack('!l', number) 306 | 307 | 308 | def pack_unsigned_long(number): 309 | return struct.pack('!L', number) 310 | 311 | 312 | def pack_long_long(number): 313 | return struct.pack('!q', number) 314 | 315 | 316 | def pack_unsigned_long_long(number): 317 | return struct.pack('!Q', number) 318 | 319 | 320 | def pack_float(number): 321 | return struct.pack('!f', number) 322 | 323 | 324 | def pack_bool(b): 325 | return struct.pack('!?', b) 326 | 327 | 328 | def pack_timestamp(timeval): 329 | number = int(timeval.timestamp() * 1e3) 330 | return struct.pack('!Q', number) 331 | 332 | 333 | def pack_byte_array(value): 334 | buffer = pack_unsigned_long(len(value)) 335 | buffer += value 336 | return buffer 337 | 338 | 339 | def pack_array(items): 340 | buffer = b'' 341 | for value in items: 342 | buffer += pack_field_value(value) 343 | 344 | return pack_unsigned_long(len(buffer)) + buffer 345 | 346 | 347 | def pack_bools(*bs): 348 | tot = 0 349 | for n, b in enumerate(bs): 350 | x = 1 if b else 0 351 | tot += (x << n) 352 | return pack_octet(tot) 353 | -------------------------------------------------------------------------------- /src/asynqp/spec.py: -------------------------------------------------------------------------------- 1 | import struct 2 | from collections import OrderedDict 3 | from io import BytesIO 4 | from xml.etree import ElementTree 5 | import pkg_resources 6 | from . 
def read_method(raw):
    """Deserialise a raw method payload into the matching Method subclass.

    The first four bytes are the (class-id, method-id) pair used to look
    up the generated class in METHODS.
    """
    type_code = struct.unpack('!HH', raw[:4])
    method_cls = METHODS[type_code]
    return method_cls.read(BytesIO(raw))
def load_spec():
    """Parse the bundled AMQP 0-9-1 XML spec.

    Returns a pair: the generated method classes and the protocol constants.
    """
    spec_tree = parse_tree()
    methods = generate_methods(get_classes(spec_tree))
    return methods, get_constants(spec_tree)
def get_constants(tree):
    """Map each <constant> element in the spec to UPPER_SNAKE_CASE name -> int value."""
    return {
        elem.attrib['name'].replace('-', '_').upper(): int(elem.attrib['value'])
        for elem in tree.findall('constant')
    }
def generate_exceptions(constants):
    """Create an AMQPError subclass for every reply code in the error range.

    Codes 300-599 are treated as errors, mirroring the AMQP reply-code scheme.
    """
    exception_classes = {}
    for name, value in constants.items():
        if not 300 <= value < 600:
            continue  # informational codes don't become exceptions
        classname = ''.join(part.capitalize() for part in name.split('_'))
        exception_classes[classname] = type(classname, (AMQPError,), {})
    return exception_classes
class LoopContext:
    """Base test context: an event loop in debug mode plus scheduling helpers."""

    def given_an_event_loop(self):
        self.exceptions = []
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

    def cleanup_test_hack(self):
        self.loop.set_debug(False)

    def exception_handler(self, loop, context):
        # Collect loop exceptions instead of letting asyncio log them.
        self.exceptions.append(context['exception'])

    def tick(self):
        run_briefly(self.loop)

    def async_partial(self, coro):
        """
        Schedule a coroutine which you are not expecting to complete before the end of the test.
        Disables the error log when the task is destroyed before completing.
        """
        task = asyncio.ensure_future(coro)
        task._log_destroy_pending = False
        self.tick()
        return task

    def wait_for(self, coro):
        wrapped = asyncio.wait_for(coro, timeout=0.2, loop=self.loop)
        return self.loop.run_until_complete(wrapped)
class ExchangeContext(OpenChannelContext):
    """Context with a durable fanout exchange already declared on the channel."""

    def given_an_exchange(self):
        self.exchange = self.make_exchange('my.nice.exchange')

    def make_exchange(self, name):
        # Kick off the declaration, then play the server's reply so it completes.
        declare = self.channel.declare_exchange(
            name, 'fanout', durable=True, auto_delete=False, internal=False)
        task = asyncio.ensure_future(declare, loop=self.loop)
        self.tick()
        self.server.send_method(self.channel.id, spec.ExchangeDeclareOK())
        return task.result()
class ProtocolContext(LoopContext):
    """Context with an AMQP protocol instance connected to a mock transport."""

    def given_a_connected_protocol(self):
        self.dispatcher = asynqp.routing.Dispatcher()
        self.transport = mock.Mock(spec=asyncio.Transport)
        self.protocol = protocol.AMQP(self.dispatcher, self.loop)
        self.protocol.connection_made(self.transport)
class WhenChannelOpenOKArrives(OpenConnectionContext):
    """The reply to Channel.Open should resolve the pending task with a channel."""

    def given_the_user_has_called_open_channel(self):
        coro = self.connection.open_channel()
        self.task = asyncio.ensure_future(coro)
        self.tick()

    def when_channel_open_ok_arrives(self):
        self.server.send_method(1, spec.ChannelOpenOK(''))

    def it_should_have_the_correct_channel_id(self):
        channel = self.task.result()
        assert channel.id == 1
class WhenAnotherMethodArrivesAfterTheServerClosedTheChannel(OpenChannelContext):
    """Frames arriving on a server-closed channel must be ignored, not answered."""

    def given_the_server_closed_the_channel(self):
        close = spec.ChannelClose(404, 'i am tired of you', 40, 50)
        self.server.send_method(self.channel.id, close)
        self.server.reset()

    def when_another_method_arrives(self):
        self.server.send_method(self.channel.id, spec.ChannelOpenOK(''))

    def it_MUST_discard_the_method(self):
        self.server.should_not_have_received_any()
class WhenSettingQOS(OpenChannelContext):
    """set_qos should translate directly into a Basic.Qos method frame."""

    def when_we_are_setting_prefetch_count_only(self):
        coro = self.channel.set_qos(prefetch_size=1000, prefetch_count=100, apply_globally=True)
        self.async_partial(coro)

    def it_should_send_BasicQos_with_default_values(self):
        expected = spec.BasicQos(1000, 100, True)
        self.server.should_have_received_method(self.channel.id, expected)
class WhenBasicReturnArrivesAndIHaveNotDefinedAHandler(OpenChannelContext):
    """An unhandled Basic.Return should surface via the loop's exception handler."""

    def given_I_am_listening_for_asyncio_exceptions(self):
        self.expected_message = asynqp.Message('body')
        self.exception = None

        def record(loop, context):
            # Capture whatever the channel reports to the event loop.
            self.exception = context["exception"]
        self.loop.set_exception_handler(record)

    def when_BasicReturn_arrives(self):
        returned = spec.BasicReturn(123, "you messed up", "the.exchange", "the.routing.key")
        self.server.send_frame(frames.MethodFrame(self.channel.id, returned))

        header_payload = message.get_header_payload(self.expected_message, spec.BasicGet.method_type[0])
        self.server.send_frame(frames.ContentHeaderFrame(self.channel.id, header_payload))

        first_body_frame = message.get_frame_payloads(self.expected_message, 100)[0]
        self.server.send_frame(frames.ContentBodyFrame(self.channel.id, first_body_frame))
        self.tick()

    def it_should_throw_an_exception(self):
        assert self.exception is not None

    def it_should_set_the_reply_code(self):
        assert self.exception.args == (self.expected_message,)

    def cleanup_the_exception_handler(self):
        self.loop.set_exception_handler(util.testing_exception_handler)
class WhenTheHandlerIsNotCallable(OpenChannelContext):
    """set_return_handler must reject arguments that cannot be called."""

    def when_I_set_the_handler(self):
        not_callable = "i am not callable"
        self.exception = contexts.catch(self.channel.set_return_handler, not_callable)

    def it_should_throw_a_TypeError(self):
        assert isinstance(self.exception, TypeError)
class WhenServerClosesConnectionChannelShouldAlsoClose(OpenChannelContext):
    """A server-initiated connection close should close the open channel too."""

    def when_connection_is_closed(self):
        close = spec.ConnectionClose(123, 'you muffed up', 10, 20)
        self.server.send_method(0, close)
        self.tick()
        self.was_closed = self.channel.is_closed()

    def it_should_not_hang_channel_close(self):
        waiter = asyncio.wait_for(self.channel.close(), 0.2)
        self.loop.run_until_complete(waiter)

    def if_should_have_closed_channel(self):
        assert self.was_closed
class WhenRespondingToConnectionStart(MockServerContext):
    """The client replies to Connection.Start with credentials and capabilities."""

    def given_I_wrote_the_protocol_header(self):
        connection_info = {'username': 'guest', 'password': 'guest', 'virtual_host': '/'}
        self.async_partial(open_connection(self.loop, self.transport, self.protocol, self.dispatcher, connection_info))

    def when_ConnectionStart_arrives(self):
        start = spec.ConnectionStart(0, 9, {}, 'PLAIN AMQPLAIN', 'en_US')
        self.server.send_method(0, start)

    def it_should_send_start_ok(self):
        client_properties = {
            "product": "asynqp",
            "version": "0.1",
            "platform": sys.version,
            "capabilities": {"consumer_cancel_notify": True},
        }
        credentials = {'LOGIN': 'guest', 'PASSWORD': 'guest'}
        expected_method = spec.ConnectionStartOK(client_properties, 'AMQPLAIN', credentials, 'en_US')
        self.server.should_have_received_method(0, expected_method)
class WhenRespondingToConnectionClose(OpenConnectionContext):
    """The client must answer a server Connection.Close with Close-OK, and a
    subsequent connection.close() must still complete promptly."""

    def when_the_close_frame_arrives(self):
        self.server.send_method(0, spec.ConnectionClose(123, 'you muffed up', 10, 20))

    def it_should_send_close_ok(self):
        self.server.should_have_received_method(0, spec.ConnectionCloseOK())

    # renamed from it_should_not_block_clonnection_close: fixed the typo
    # (test methods are discovered by the it_ prefix, so the rename is safe)
    def it_should_not_block_connection_close(self):
        self.loop.run_until_complete(
            asyncio.wait_for(self.connection.close(), 0.2))
class WhenAConnectionIsLostCloseConnection(OpenConnectionContext):
    """Losing the transport must not make a subsequent close() hang."""

    def when_connection_is_closed(self):
        with suppress(Exception):
            self.connection.protocol.connection_lost(Exception())

    def it_should_not_hang(self):
        waiter = asyncio.wait_for(self.connection.close(), 0.2)
        self.loop.run_until_complete(waiter)
class WhenServerAndClientCloseConnectionAtATime(OpenConnectionContext):
    """A simultaneous close from both peers must resolve cleanly on the client."""

    def when_both_sides_close_channel(self):
        # the client starts closing first...
        self.task = asyncio.ensure_future(self.connection.close(), loop=self.loop)
        self.tick()
        # ...then the server's own Close races the expected Close-OK
        self.server.send_method(
            0, spec.ConnectionClose(123, 'you muffed up', 10, 20))
        self.tick()
        self.tick()
        self.task.result()

    def if_should_have_closed_connection(self):
        assert self.connection._closing

    def it_should_have_killed_synchroniser_with_server_error(self):
        connection_exc = self.connection.synchroniser.connection_exc
        assert isinstance(connection_exc, exceptions.ConnectionClosed)
        assert connection_exc.reply_code == 123
class WhenIDeclareTheDefaultExchange(OpenChannelContext):
    """Re-declaring the nameless default exchange must send nothing to the server."""

    def when_I_declare_an_exchange_with_an_empty_name(self):
        self.server.reset()
        declare = self.channel.declare_exchange('', 'direct', durable=True, auto_delete=False, internal=False)
        task = asyncio.ensure_future(declare)
        self.tick()
        self.exchange = task.result()

    def it_should_not_send_exchange_declare(self):
        self.server.should_not_have_received_any()

    def it_should_return_an_exchange_with_no_name(self):
        assert self.exchange.name == ''

    def it_should_be_a_direct_exchange(self):
        assert self.exchange.type == 'direct'

    def it_should_be_durable(self):
        assert self.exchange.durable

    def it_should_not_auto_delete(self):
        assert not self.exchange.auto_delete

    def it_should_not_be_internal(self):
        assert not self.exchange.internal
@classmethod 77 | def examples_of_bad_words(cls): 78 | yield "amq.starts.with.amq." 79 | yield "contains'illegal$ymbols" 80 | 81 | def because_I_try_to_declare_the_exchange(self, name): 82 | task = asyncio.ensure_future(self.channel.declare_exchange(name, 'direct')) 83 | self.tick() 84 | self.exception = task.exception() 85 | 86 | def it_should_throw_ValueError(self): 87 | assert isinstance(self.exception, ValueError) 88 | 89 | 90 | class WhenPublishingAShortMessage(ExchangeContext): 91 | def given_a_message(self): 92 | self.correlation_id = str(uuid.uuid4()) 93 | self.message_id = str(uuid.uuid4()) 94 | self.timestamp = datetime(2014, 5, 4) 95 | self.msg = asynqp.Message( 96 | 'body', 97 | content_type='application/json', 98 | content_encoding='utf-8', 99 | headers={}, 100 | delivery_mode=2, 101 | priority=5, 102 | correlation_id=self.correlation_id, 103 | reply_to='me', 104 | expiration='tomorrow', 105 | message_id=self.message_id, 106 | timestamp=self.timestamp, 107 | type='telegram', 108 | user_id='benjamin', 109 | app_id='asynqptests' 110 | ) 111 | 112 | def when_I_publish_the_message(self): 113 | self.exchange.publish(self.msg, 'routing.key', mandatory=True) 114 | 115 | def it_should_send_a_BasicPublish_method_followed_by_a_header_and_the_body(self): 116 | expected_method = spec.BasicPublish(0, self.exchange.name, 'routing.key', True, False) 117 | header_payload = message.ContentHeaderPayload(60, 4, [ 118 | 'application/json', 119 | 'utf-8', 120 | {}, 2, 5, 121 | self.correlation_id, 122 | 'me', 'tomorrow', 123 | self.message_id, 124 | self.timestamp, 125 | 'telegram', 126 | 'benjamin', 127 | 'asynqptests' 128 | ]) 129 | expected_header = frames.ContentHeaderFrame(self.channel.id, header_payload) 130 | expected_body = frames.ContentBodyFrame(self.channel.id, b'body') 131 | self.server.should_have_received_frames([ 132 | frames.MethodFrame(self.channel.id, expected_method), 133 | expected_header, 134 | expected_body 135 | ], any_order=False) 136 | 137 | 138 | 
class WhenPublishingALongMessage(ExchangeContext): 139 | def given_a_message(self): 140 | self.body1 = b"a" * (self.frame_max - 8) 141 | self.body2 = b"b" * (self.frame_max - 8) 142 | self.body3 = b"c" * (self.frame_max - 8) 143 | body = self.body1 + self.body2 + self.body3 144 | self.msg = asynqp.Message(body) 145 | 146 | def when_I_publish_the_message(self): 147 | self.exchange.publish(self.msg, 'routing.key') 148 | 149 | def it_should_send_multiple_body_frames(self): 150 | expected_body1 = frames.ContentBodyFrame(self.channel.id, self.body1) 151 | expected_body2 = frames.ContentBodyFrame(self.channel.id, self.body2) 152 | expected_body3 = frames.ContentBodyFrame(self.channel.id, self.body3) 153 | self.server.should_have_received_frames([ 154 | expected_body1, 155 | expected_body2, 156 | expected_body3 157 | ], any_order=False) 158 | 159 | 160 | class WhenDeletingAnExchange(ExchangeContext): 161 | def when_I_delete_the_exchange(self): 162 | self.async_partial(self.exchange.delete(if_unused=True)) 163 | 164 | def it_should_send_ExchangeDelete(self): 165 | self.server.should_have_received_method(self.channel.id, spec.ExchangeDelete(0, self.exchange.name, True, False)) 166 | 167 | 168 | class WhenExchangeDeleteOKArrives(ExchangeContext): 169 | def given_I_deleted_the_exchange(self): 170 | asyncio.ensure_future(self.exchange.delete(if_unused=True), loop=self.loop) 171 | self.tick() 172 | 173 | def when_confirmation_arrives(self): 174 | self.server.send_method(self.channel.id, spec.ExchangeDeleteOK()) 175 | 176 | def it_should_not_throw(self): 177 | pass 178 | 179 | 180 | class WhenExchangeDeclareWithPassiveAndOKArrives(OpenChannelContext): 181 | def given_I_declared_an_exchange(self): 182 | self.task = asyncio.ensure_future( 183 | self.channel.declare_exchange( 184 | 'name_1', 'fanout', passive=True, 185 | durable=True, auto_delete=False, internal=False)) 186 | self.tick() 187 | 188 | def when_the_exchange_declare_ok_arrives(self): 189 | 
self.server.send_method(self.channel.id, spec.ExchangeDeclareOK()) 190 | 191 | def it_should_return_an_exchange_object(self): 192 | result = self.task.result() 193 | assert result.name == 'name_1' 194 | assert result.type == 'fanout' 195 | 196 | def it_should_have_sent_passive_in_frame(self): 197 | self.server.should_have_received_method( 198 | self.channel.id, spec.ExchangeDeclare( 199 | 0, 'name_1', 'fanout', True, True, False, False, False, {})) 200 | 201 | 202 | class WhenExchangeDeclareWithPassiveAndErrorArrives(OpenChannelContext): 203 | def given_I_declared_an_exchange(self): 204 | self.task = asyncio.ensure_future( 205 | self.channel.declare_exchange( 206 | 'name_1', 'fanout', passive=True, 207 | durable=True, auto_delete=False, internal=False)) 208 | self.tick() 209 | 210 | def when_error_arrives(self): 211 | self.server.send_method( 212 | self.channel.id, spec.ChannelClose(404, 'Bad exchange', 40, 50)) 213 | 214 | def it_should_raise_not_found_error(self): 215 | assert isinstance(self.task.exception(), exceptions.NotFound) 216 | 217 | 218 | class WhenIDeclareExchangeWithNoWait(OpenChannelContext): 219 | def given_I_declared_a_queue_with_passive(self): 220 | self.task = asyncio.ensure_future(self.channel.declare_exchange( 221 | 'my.nice.exchange', 'fanout', durable=True, auto_delete=False, 222 | internal=False, nowait=True), loop=self.loop) 223 | self.tick() 224 | 225 | def it_should_return_exchange_object_without_wait(self): 226 | result = self.task.result() 227 | assert result 228 | assert result.name == 'my.nice.exchange' 229 | assert result.type == 'fanout' 230 | 231 | def it_should_have_sent_nowait_in_frame(self): 232 | self.server.should_have_received_method( 233 | self.channel.id, spec.ExchangeDeclare( 234 | 0, 'my.nice.exchange', 'fanout', False, True, False, False, 235 | True, {})) 236 | -------------------------------------------------------------------------------- /test/heartbeat_tests.py: 
# test/heartbeat_tests.py
import asyncio
from unittest import mock

from asynqp.frames import HeartbeatFrame
from asynqp.exceptions import ConnectionLostError

from .base_contexts import MockServerContext


class WhenServerWaitsForHeartbeat(MockServerContext):
    """Once heartbeating starts, the client should emit a heartbeat frame
    after the configured interval elapses."""

    def when_heartbeating_starts(self):
        interval = 0.01
        self.protocol.start_heartbeat(interval)
        # Sleep just past one interval so exactly one beat is due.
        self.loop.run_until_complete(asyncio.sleep(0.015))

    def it_should_send_the_heartbeat(self):
        self.server.should_have_received_frame(HeartbeatFrame())

    def cleanup_tasks(self):
        # Stop the monitor and wait (bounded) for its task to finish so no
        # pending task leaks into the next test context.
        monitor = self.protocol.heartbeat_monitor
        monitor.stop()
        self.loop.run_until_complete(
            asyncio.wait_for(monitor.wait_closed(), timeout=0.2))


class WhenServerRespondsToHeartbeat(MockServerContext):
    """A heartbeat from the server must not stop the client's own beats."""

    def given_i_started_heartbeating(self):
        self.protocol.start_heartbeat(0.01)
        self.loop.run_until_complete(asyncio.sleep(0.015))

    def when_the_server_replies(self):
        self.server.send_frame(HeartbeatFrame())
        self.loop.run_until_complete(asyncio.sleep(0.005))

    def it_should_send_the_heartbeat(self):
        # Two intervals-worth of activity -> two client heartbeats.
        self.server.should_have_received_frames(
            [HeartbeatFrame(), HeartbeatFrame()])

    def cleanup_tasks(self):
        monitor = self.protocol.heartbeat_monitor
        monitor.stop()
        self.loop.run_until_complete(
            asyncio.wait_for(monitor.wait_closed(), timeout=0.2))


class WhenServerDoesNotRespondToHeartbeat(MockServerContext):
    """Silence from the server past the timeout should poison the dispatcher
    with ConnectionLostError."""

    def given_i_started_heartbeating(self):
        self.protocol.start_heartbeat(0.01)

    def when_the_server_dies(self):
        # Patch dispatch_all so we can observe the poison pill rather than
        # have it propagate through real handlers.
        with mock.patch("asynqp.routing.Dispatcher.dispatch_all") as mocked:
            # Two intervals with no reply -> connection considered lost.
            self.loop.run_until_complete(asyncio.sleep(0.021))
            self.mocked = mocked

    def it_should_dispatch_a_poison_pill(self):
        assert self.mocked.called
        assert isinstance(
            self.mocked.call_args[0][0].exception, ConnectionLostError)

    def cleanup_tasks(self):
        monitor = self.protocol.heartbeat_monitor
        monitor.stop()
        self.loop.run_until_complete(
            asyncio.wait_for(monitor.wait_closed(), timeout=0.2))


# test/integration_tests.py
import asyncio
import socket

import asynqp
import contexts

from .util import run_briefly


class ConnectionContext:
    """Base context: a live connection to the local broker."""

    def given_a_connection(self):
        self.loop = asyncio.get_event_loop()
        self.connection = self.loop.run_until_complete(
            asyncio.wait_for(asynqp.connect(), 0.2))

    def cleanup_the_connection(self):
        self.loop.run_until_complete(
            asyncio.wait_for(self.connection.close(), 0.2))


class ChannelContext(ConnectionContext):
    """Connection plus an open channel."""

    def given_a_channel(self):
        self.channel = self.loop.run_until_complete(
            asyncio.wait_for(self.connection.open_channel(), 0.2))

    def cleanup_the_channel(self):
        self.loop.run_until_complete(
            asyncio.wait_for(self.channel.close(), 0.2))


class BoundQueueContext(ChannelContext):
    """Channel plus an exclusive queue bound to a fanout exchange."""

    def given_a_queue_bound_to_an_exchange(self):
        self.loop.run_until_complete(asyncio.wait_for(self.setup(), 0.5))

    def cleanup_the_queue_and_exchange(self):
        self.loop.run_until_complete(asyncio.wait_for(self.teardown(), 0.3))

    @asyncio.coroutine
    def setup(self):
        self.queue = yield from self.channel.declare_queue(
            'my.queue', exclusive=True)
        self.exchange = yield from self.channel.declare_exchange(
            'my.exchange', 'fanout')
        # Fanout ignores the routing key, hence the throwaway name.
        yield from self.queue.bind(self.exchange, 'doesntmatter')

    @asyncio.coroutine
    def teardown(self):
        yield from self.queue.delete(if_unused=False, if_empty=False)
        yield from self.exchange.delete(if_unused=False)
WhenConnectingToRabbit: 46 | def given_the_loop(self): 47 | self.loop = asyncio.get_event_loop() 48 | 49 | def when_I_connect(self): 50 | self.connection = self.loop.run_until_complete(asyncio.wait_for(asynqp.connect(), 0.2)) 51 | 52 | def it_should_connect(self): 53 | assert self.connection is not None 54 | 55 | def cleanup_the_connection(self): 56 | self.loop.run_until_complete(asyncio.wait_for(self.connection.close(), 0.2)) 57 | 58 | 59 | class WhenConnectingToRabbitWithAnExistingSocket: 60 | def given_the_loop(self): 61 | self.loop = asyncio.get_event_loop() 62 | self.sock = socket.create_connection(("localhost", 5672)) 63 | 64 | def when_I_connect(self): 65 | self.connection = self.loop.run_until_complete(asyncio.wait_for(asynqp.connect(sock=self.sock), 0.2)) 66 | 67 | def it_should_connect(self): 68 | assert self.connection is not None 69 | 70 | def cleanup_the_connection(self): 71 | self.loop.run_until_complete(asyncio.wait_for(self.connection.close(), 0.2)) 72 | self.sock.close() 73 | 74 | 75 | class WhenOpeningAChannel(ConnectionContext): 76 | def when_I_open_a_channel(self): 77 | self.channel = self.loop.run_until_complete(asyncio.wait_for(self.connection.open_channel(), 0.2)) 78 | 79 | def it_should_give_me_the_channel(self): 80 | assert self.channel is not None 81 | 82 | def cleanup_the_channel(self): 83 | self.loop.run_until_complete(asyncio.wait_for(self.channel.close(), 0.2)) 84 | 85 | 86 | class WhenDeclaringAQueue(ChannelContext): 87 | ARGUMENTS = {'x-expires': 300, 'x-message-ttl': 1000, 'x-table-test': {'a': [1, 'a', {}, []], 'c': 1}} 88 | 89 | def when_I_declare_a_queue(self): 90 | coro = self.channel.declare_queue('my.queue', exclusive=True, arguments=WhenDeclaringAQueue.ARGUMENTS) 91 | self.queue = self.loop.run_until_complete(asyncio.wait_for(coro, 0.2)) 92 | 93 | def it_should_have_the_correct_queue_name(self): 94 | assert self.queue.name == 'my.queue' 95 | 96 | def it_should_have_the_correct_attributes_in_rabbitmq(self): 97 | assert 
self.queue.arguments == WhenDeclaringAQueue.ARGUMENTS 98 | 99 | def cleanup_the_queue(self): 100 | self.loop.run_until_complete(asyncio.wait_for(self.queue.delete(if_unused=False, if_empty=False), 0.2)) 101 | 102 | 103 | class WhenDeclaringAnExchange(ChannelContext): 104 | def when_I_declare_an_exchange(self): 105 | self.exchange = self.loop.run_until_complete(asyncio.wait_for(self.channel.declare_exchange('my.exchange', 'fanout'), 0.2)) 106 | 107 | def it_should_have_the_correct_name(self): 108 | assert self.exchange.name == 'my.exchange' 109 | 110 | def cleanup_the_exchange(self): 111 | self.loop.run_until_complete(asyncio.wait_for(self.exchange.delete(if_unused=False), 0.2)) 112 | 113 | 114 | class WhenPublishingAndGettingAShortMessage(BoundQueueContext): 115 | def given_I_published_a_message(self): 116 | self.message = asynqp.Message('here is the body') 117 | self.exchange.publish(self.message, 'routingkey') 118 | 119 | def when_I_get_the_message(self): 120 | self.result = self.loop.run_until_complete(asyncio.wait_for(self.queue.get(), 0.2)) 121 | 122 | def it_should_return_my_message(self): 123 | assert self.result == self.message 124 | 125 | 126 | class WhenConsumingAShortMessage(BoundQueueContext): 127 | def given_a_consumer(self): 128 | self.message = asynqp.Message('this is my body') 129 | self.message_received = asyncio.Future() 130 | self.loop.run_until_complete(asyncio.wait_for(self.queue.consume(self.message_received.set_result), 0.2)) 131 | 132 | def when_I_publish_a_message(self): 133 | self.exchange.publish(self.message, 'routingkey') 134 | self.loop.run_until_complete(asyncio.wait_for(self.message_received, 0.2)) 135 | 136 | def it_should_deliver_the_message_to_the_consumer(self): 137 | assert self.message_received.result() == self.message 138 | 139 | 140 | class WhenIStartAConsumerWithAMessageWaiting(BoundQueueContext): 141 | def given_a_published_message(self): 142 | self.message = asynqp.Message('this is my body') 143 | 
class WhenIStartAConsumerWithSeveralMessagesWaiting(BoundQueueContext):
    """Both backlogged messages should reach the consumer, in publish order."""

    def given_published_messages(self):
        self.message1 = asynqp.Message('one')
        # BUG FIX: message2 previously had the same body ('one') as message1,
        # so the ordering assertion below could not distinguish in-order from
        # out-of-order delivery. Give it a distinct body.
        self.message2 = asynqp.Message('two')
        self.exchange.publish(self.message1, 'routingkey')
        self.exchange.publish(self.message2, 'routingkey')

        self.received = []

    def when_I_start_a_consumer(self):
        self.loop.run_until_complete(
            asyncio.wait_for(self.start_consumer(), 0.3))

    def it_should_deliver_the_messages_to_the_consumer(self):
        # Order matters: deliveries must arrive in publish order.
        assert self.received == [self.message1, self.message2]

    @asyncio.coroutine
    def start_consumer(self):
        yield from self.queue.consume(self.received.append)
        yield from asyncio.sleep(0.05)  # possibly flaky


class WhenPublishingAndGettingALongMessage(BoundQueueContext):
    """A body longer than frame_max must round-trip intact through
    basic.publish / basic.get."""

    def given_a_multi_frame_message_and_a_consumer(self):
        frame_max = self.connection.connection_info['frame_max']
        # Three full content frames; the 8-byte allowance covers the frame
        # header/end overhead per frame.
        body1 = "a" * (frame_max - 8)
        body2 = "b" * (frame_max - 8)
        body3 = "c" * (frame_max - 8)
        body = body1 + body2 + body3
        self.msg = asynqp.Message(body)

    def when_I_publish_and_get_the_message(self):
        self.exchange.publish(self.msg, 'routingkey')
        self.result = self.loop.run_until_complete(
            asyncio.wait_for(self.queue.get(), 0.2))

    def it_should_return_my_message(self):
        assert self.result == self.msg
194 | 195 | 196 | class WhenPublishingAndConsumingALongMessage(BoundQueueContext): 197 | def given_a_multi_frame_message(self): 198 | frame_max = self.connection.connection_info['frame_max'] 199 | body1 = "a" * (frame_max - 8) 200 | body2 = "b" * (frame_max - 8) 201 | body3 = "c" * (frame_max - 8) 202 | body = body1 + body2 + body3 203 | self.msg = asynqp.Message(body) 204 | 205 | self.message_received = asyncio.Future() 206 | self.loop.run_until_complete(asyncio.wait_for(self.queue.consume(self.message_received.set_result), 0.2)) 207 | 208 | def when_I_publish_and_get_the_message(self): 209 | self.exchange.publish(self.msg, 'routingkey') 210 | self.loop.run_until_complete(asyncio.wait_for(self.message_received, 0.2)) 211 | 212 | def it_should_deliver_the_message_to_the_consumer(self): 213 | assert self.message_received.result() == self.msg 214 | 215 | 216 | class WhenBasicCancelIsInterleavedWithAnotherMethod(BoundQueueContext): 217 | def given_I_have_started_a_consumer(self): 218 | self.consumer = self.loop.run_until_complete(asyncio.wait_for(self.queue.consume(lambda x: None), 0.2)) 219 | 220 | def when_I_cancel_the_consumer_and_also_get_a_message(self): 221 | self.consumer.cancel() 222 | self.exception = contexts.catch(self.loop.run_until_complete, asyncio.wait_for(self.queue.get(), 0.2)) 223 | 224 | def it_should_not_throw(self): 225 | assert self.exception is None 226 | 227 | 228 | class WhenPublishingWithUnsetLoop: 229 | 230 | def given_I_have_a_queue(self): 231 | @asyncio.coroutine 232 | def set_up(): 233 | self.connection = yield from asynqp.connect(loop=self.loop) 234 | self.channel = yield from self.connection.open_channel() 235 | self.exchange = yield from self.channel.declare_exchange( 236 | '', 'direct') 237 | self.queue = yield from self.channel.declare_queue( 238 | durable=False, 239 | exclusive=True, 240 | auto_delete=True) 241 | self.loop = asyncio.get_event_loop() 242 | asyncio.set_event_loop(None) 243 | self.loop.run_until_complete(set_up()) 244 
| 245 | def when_I_publish_the_message(self): 246 | message = asynqp.Message(b"Test message") 247 | self.exchange.publish(message, self.queue.name) 248 | 249 | def it_should_return_my_message(self): 250 | for retry in range(10): 251 | msg = self.loop.run_until_complete(self.queue.get(no_ack=True)) 252 | if msg is not None: 253 | break 254 | assert msg.body == b"Test message" 255 | 256 | def cleanup_loop(self): 257 | @asyncio.coroutine 258 | def tear_down(): 259 | yield from self.channel.close() 260 | yield from self.connection.close() 261 | self.loop.run_until_complete(tear_down()) 262 | asyncio.set_event_loop(self.loop) 263 | 264 | 265 | class WhenConsumingWithUnsetLoop: 266 | 267 | def given_I_published_a_message(self): 268 | @asyncio.coroutine 269 | def set_up(): 270 | self.connection = yield from asynqp.connect(loop=self.loop) 271 | self.channel = yield from self.connection.open_channel() 272 | self.exchange = yield from self.channel.declare_exchange( 273 | '', 'direct') 274 | self.queue = yield from self.channel.declare_queue( 275 | durable=False, 276 | exclusive=True, 277 | auto_delete=True) 278 | self.loop = asyncio.get_event_loop() 279 | asyncio.set_event_loop(None) 280 | self.loop.run_until_complete(set_up()) 281 | 282 | message = asynqp.Message(b"Test message") 283 | self.exchange.publish(message, self.queue.name) 284 | 285 | def when_I_consume_a_message(self): 286 | self.result = [] 287 | consumer = self.loop.run_until_complete( 288 | self.queue.consume(self.result.append, exclusive=True)) 289 | for retry in range(10): 290 | run_briefly(self.loop) 291 | if self.result: 292 | break 293 | consumer.cancel() 294 | 295 | def it_should_return_my_message(self): 296 | assert self.result, "Message not consumed" 297 | assert self.result[0].body == b"Test message" 298 | 299 | def cleanup_loop(self): 300 | @asyncio.coroutine 301 | def tear_down(): 302 | yield from self.channel.close() 303 | yield from self.connection.close() 304 | 
self.loop.run_until_complete(tear_down()) 305 | asyncio.set_event_loop(self.loop) 306 | 307 | 308 | class WhenISendZeroMessage(BoundQueueContext): 309 | def given_an_empty_message(self): 310 | self.message = asynqp.Message('') 311 | self.exchange.publish(self.message, 'routingkey') 312 | 313 | def when_I_start_a_consumer(self): 314 | self.message_received = asyncio.Future() 315 | self.loop.run_until_complete(asyncio.wait_for(self.start_consumer(), 0.2)) 316 | 317 | def it_should_deliver_the_message_to_the_consumer(self): 318 | assert self.message_received.result() == self.message 319 | 320 | @asyncio.coroutine 321 | def start_consumer(self): 322 | yield from self.queue.consume(self.message_received.set_result) 323 | yield from self.message_received 324 | -------------------------------------------------------------------------------- /test/message_tests.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import uuid 4 | from datetime import datetime 5 | import asynqp 6 | from asynqp import amqptypes 7 | from asynqp import message 8 | from asynqp import spec 9 | from asynqp import frames 10 | from .base_contexts import QueueContext 11 | 12 | 13 | class WhenGettingTheContentHeader: 14 | def given_a_message(self): 15 | self.correlation_id = str(uuid.uuid4()) 16 | self.message_id = str(uuid.uuid4()) 17 | self.timestamp = datetime.fromtimestamp(12345) 18 | self.message = asynqp.Message( 19 | 'body', 20 | content_type='application/json', 21 | content_encoding='utf-8', 22 | headers={}, 23 | delivery_mode=2, 24 | priority=5, 25 | correlation_id=self.correlation_id, 26 | reply_to='me', 27 | expiration='tomorrow', 28 | message_id=self.message_id, 29 | timestamp=self.timestamp, 30 | type='telegram', 31 | user_id='benjamin', 32 | app_id='asynqptests' 33 | ) 34 | 35 | def when_I_ask_for_the_header(self): 36 | self.payload = message.get_header_payload(self.message, 50) 37 | 38 | def 
class WhenIPassInADictWithNoContentHeader:
    """A dict body is JSON-encoded and the content type defaulted."""

    def when_I_make_a_message_with_a_dict_and_no_content_type(self):
        self.body = {'somestuff': 123}
        self.message = asynqp.Message(self.body)

    def it_should_jsonify_the_dict(self):
        decoded = self.message.body.decode(self.message.content_encoding)
        assert json.loads(decoded) == self.body

    def it_should_set_the_content_type_for_me(self):
        assert self.message.content_type == 'application/json'


class WhenIPassInADictWithAContentTypeHeader:
    """An explicit content type wins over the JSON default."""

    def when_I_make_a_message_with_a_dict_and_a_content_type(self):
        self.body = {'somestuff': 123}
        self.message = asynqp.Message(
            self.body, content_type='application/vnd.my.mime.type')

    def it_should_jsonify_the_dict(self):
        decoded = self.message.body.decode(self.message.content_encoding)
        assert json.loads(decoded) == self.body

    def it_should_not_set_the_content_type_for_me(self):
        assert self.message.content_type == 'application/vnd.my.mime.type'


class WhenIPassInAStrWithNoEncoding:
    """str bodies default to UTF-8 encoding."""

    def when_I_make_a_message_with_a_str(self):
        self.body = "my_str"
        self.message = asynqp.Message(self.body)

    def it_should_encode_the_body_as_utf8_for_me(self):
        assert self.message.body.decode('utf-8') == self.body


class WhenIPassInAStrWithAnEncoding:
    """An explicit content encoding is honoured for str bodies."""

    def when_I_make_a_message_with_a_str_and_an_encoding(self):
        self.body = "my_str"
        self.message = asynqp.Message(self.body, content_encoding='latin-1')

    def it_should_encode_the_body_with_the_encoding_I_wanted(self):
        assert self.message.body.decode('latin-1') == self.body


class WhenIPassInBytes:
    """bytes bodies are passed through untouched."""

    def when_I_make_a_message_with_bytes(self):
        self.body = b'hello'
        self.message = asynqp.Message(self.body)

    def it_should_not_try_to_decode_the_body(self):
        assert self.message.body == self.body


class WhenGettingFramesForAShortMessage:
    """A body smaller than the frame size yields a single payload."""

    def given_a_message(self):
        self.message = asynqp.Message('body')

    def when_I_get_the_frames(self):
        self.frames = message.get_frame_payloads(self.message, 100)

    def it_should_return_one_frame(self):
        assert self.frames == [b'body']


class WhenGettingFramesForALongMessage:
    """A body larger than the frame size is chunked."""

    def given_a_message(self):
        self.message = asynqp.Message('much longer body')

    def because_the_message_is_longer_than_the_max_size(self):
        self.frames = message.get_frame_payloads(self.message, 5)

    def it_should_split_the_body_into_frames(self):
        assert self.frames == [b'much ', b'longe', b'r bod', b'y']


class WhenIAcknowledgeADeliveredMessage(QueueContext):
    """Acking a message obtained via basic.get sends BasicAck with the
    delivery tag the server handed out."""

    def given_I_received_a_message(self):
        self.delivery_tag = 12487

        outgoing = asynqp.Message('body', timestamp=datetime(2014, 5, 5))
        task = asyncio.ensure_future(self.queue.get())
        self.tick()

        # Simulate the server's three-part delivery: method, header, body.
        get_ok = spec.BasicGetOK(
            self.delivery_tag, False, 'my.exchange', 'routing.key', 0)
        self.server.send_method(self.channel.id, get_ok)

        header = message.get_header_payload(
            outgoing, spec.BasicGet.method_type[0])
        self.server.send_frame(
            frames.ContentHeaderFrame(self.channel.id, header))

        payload = message.get_frame_payloads(outgoing, 100)[0]
        self.server.send_frame(
            frames.ContentBodyFrame(self.channel.id, payload))
        self.tick()

        self.msg = task.result()

    def when_I_ack_the_message(self):
        self.msg.ack()

    def it_should_send_BasicAck(self):
        self.server.should_have_received_method(
            self.channel.id, spec.BasicAck(self.delivery_tag, False))
WhenIRejectADeliveredMessage(QueueContext): 153 | def given_I_received_a_message(self): 154 | self.delivery_tag = 12487 155 | 156 | msg = asynqp.Message('body', timestamp=datetime(2014, 5, 5)) 157 | task = asyncio.ensure_future(self.queue.get()) 158 | self.tick() 159 | method = spec.BasicGetOK(self.delivery_tag, False, 'my.exchange', 'routing.key', 0) 160 | self.server.send_method(self.channel.id, method) 161 | 162 | header = message.get_header_payload(msg, spec.BasicGet.method_type[0]) 163 | self.server.send_frame(frames.ContentHeaderFrame(self.channel.id, header)) 164 | 165 | body = message.get_frame_payloads(msg, 100)[0] 166 | self.server.send_frame(frames.ContentBodyFrame(self.channel.id, body)) 167 | self.tick() 168 | 169 | self.msg = task.result() 170 | 171 | def when_I_reject_the_message(self): 172 | self.msg.reject(requeue=True) 173 | 174 | def it_should_send_BasicReject(self): 175 | self.server.should_have_received_method(self.channel.id, spec.BasicReject(self.delivery_tag, True)) 176 | 177 | 178 | class WhenIGetJSONFromADeliveredMessage: 179 | def given_a_message(self): 180 | self.body = {'x': 123, 'y': ['json', 15, 'c00l']} 181 | self.msg = asynqp.Message(self.body, timestamp=datetime(2014, 5, 5)) 182 | 183 | def when_I_get_the_json(self): 184 | self.result = self.msg.json() 185 | 186 | def it_should_give_me_the_body(self): 187 | assert self.result == self.body 188 | 189 | 190 | class WhenSettingAProperty: 191 | def given_a_message(self): 192 | self.msg = asynqp.Message("abc") 193 | 194 | def when_I_set_a_property(self): 195 | self.msg.content_type = "application/json" 196 | 197 | def it_should_cast_it_to_the_correct_amqp_type(self): 198 | assert isinstance(self.msg.content_type, amqptypes.ShortStr) 199 | assert self.msg.content_type == amqptypes.ShortStr("application/json") 200 | 201 | 202 | class WhenSettingAPropertyAndIHaveAlreadyCastItMyself: 203 | def given_a_message(self): 204 | self.msg = asynqp.Message("abc") 205 | self.val = 
amqptypes.ShortStr("application/json") 206 | 207 | def when_I_set_a_property(self): 208 | self.msg.content_type = self.val 209 | 210 | def it_should_not_attempt_to_cast_it(self): 211 | assert self.msg.content_type is self.val 212 | 213 | 214 | class WhenSettingAnAttributeThatIsNotAProperty: 215 | def given_a_message(self): 216 | self.msg = asynqp.Message("abc") 217 | 218 | def when_I_set_a_property(self): 219 | self.msg.foo = 123 220 | 221 | def it_should_not_attempt_to_cast_it(self): 222 | assert self.msg.foo == 123 223 | 224 | 225 | class WhenIReadAContentHeaderWithoutAllProperties: 226 | 227 | def given_headers_wit_only_content_encoding(self): 228 | self.data = ( 229 | b'\x00<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08p\x00' 230 | b'\x05utf-8\x00\x00\x00\x00\x01') 231 | 232 | def when_I_read_properties(self): 233 | self.payload = message.ContentHeaderPayload.read(self.data) 234 | 235 | def it_should_have_only_content_encoding(self): 236 | assert self.payload == message.ContentHeaderPayload( 237 | 60, 8, [None, 'utf-8', {}, 1, None, None, 238 | None, None, None, None, None, None, None]) 239 | -------------------------------------------------------------------------------- /test/method_tests.py: -------------------------------------------------------------------------------- 1 | import asynqp 2 | from asynqp import spec 3 | from asynqp import frames 4 | from asynqp import amqptypes 5 | from asynqp import message 6 | from .base_contexts import ProtocolContext, MockDispatcherContext 7 | 8 | 9 | class WhenConnectionStartArrives(MockDispatcherContext): 10 | def given_a_connection_start_method_I_copied_from_the_rabbitmq_server(self): 11 | self.raw = (b"\x01\x00\x00\x00\x00\x01\x50" # type, channel, size 12 | b"\x00\x0A\x00\x0A\x00\t\x00\x00\x01" 13 | b"%\x0ccapabilitiesF\x00\x00\x00X\x12publisher_confirmst\x01\x1aexchange_exchange_bindings" 14 | b"t\x01\nbasic.nackt\x01\x16consumer_cancel_notifyt\x01\tcopyrightS\x00\x00\x00'Copyright " 15 | b"(C) 2007-2013 GoPivotal, 
Inc.\x0binformationS\x00\x00\x005Licensed under the MPL. " 16 | b" See http://www.rabbitmq.com/\x08platformS\x00\x00\x00\nErlang/OTP\x07productS\x00\x00\x00\x08" 17 | b"RabbitMQ\x07versionS\x00\x00\x00\x053.1.5" 18 | b"\x00\x00\x00\x0eAMQPLAIN PLAIN\x00\x00\x00\x0Ben_US en_GB\xCE") 19 | 20 | expected_method = spec.ConnectionStart(0, 9, { 21 | 'capabilities': {'publisher_confirms': True, 22 | 'exchange_exchange_bindings': True, 23 | 'basic.nack': True, 24 | 'consumer_cancel_notify': True}, 25 | 'copyright': 'Copyright (C) 2007-2013 GoPivotal, Inc.', 26 | 'information': 'Licensed under the MPL. See http://www.rabbitmq.com/', 27 | 'platform': 'Erlang/OTP', 28 | 'product': 'RabbitMQ', 29 | 'version': '3.1.5' 30 | }, 'AMQPLAIN PLAIN', 'en_US en_GB') 31 | self.expected_frame = asynqp.frames.MethodFrame(0, expected_method) 32 | 33 | def when_the_frame_arrives(self): 34 | self.protocol.data_received(self.raw) 35 | self.tick() 36 | 37 | def it_should_dispatch_a_correctly_deserialised_ConnectionStart_method(self): 38 | self.dispatcher.dispatch.assert_called_once_with(self.expected_frame) 39 | 40 | 41 | class WhenSendingConnectionStartOK(ProtocolContext): 42 | def given_a_method_to_send(self): 43 | method = spec.ConnectionStartOK({'somecrap': 'aboutme'}, 'AMQPLAIN', {'auth': 'info'}, 'en_US') 44 | self.frame = asynqp.frames.MethodFrame(0, method) 45 | 46 | def when_we_send_the_method(self): 47 | self.protocol.send_frame(self.frame) 48 | 49 | def it_should_send_the_correct_bytestring(self): 50 | expected_bytes = (b'\x01\x00\x00\x00\x00\x00>\x00\n\x00\x0b\x00\x00\x00\x15\x08somecrapS' 51 | b'\x00\x00\x00\x07aboutme\x08AMQPLAIN\x00\x00\x00\x0e\x04' 52 | b'authS\x00\x00\x00\x04info\x05en_US\xce') 53 | self.transport.write.assert_called_once_with(expected_bytes) 54 | 55 | 56 | class WhenConnectionTuneArrives(MockDispatcherContext): 57 | def given_a_connection_tune_method_I_copied_from_the_rabbitmq_server(self): 58 | self.raw = 
class WhenSendingConnectionTuneOK(ProtocolContext):
    """connection.tune-ok must serialise to the exact wire bytes."""

    def given_a_method_to_send(self):
        tune_ok = spec.ConnectionTuneOK(1024, 131072, 10)
        self.frame = asynqp.frames.MethodFrame(0, tune_ok)

    def when_I_send_the_method(self):
        self.protocol.send_frame(self.frame)

    def it_should_write_the_correct_bytestring(self):
        self.transport.write.assert_called_once_with(b'\x01\x00\x00\x00\x00\x00\x0C\x00\n\x00\x1F\x04\x00\x00\x02\x00\x00\x00\x0A\xCE')


class WhenSendingConnectionOpen(ProtocolContext):
    """connection.open must serialise to the exact wire bytes."""

    def given_a_method_to_send(self):
        open_method = spec.ConnectionOpen('/', '', False)
        self.frame = asynqp.frames.MethodFrame(0, open_method)

    def when_I_send_the_method(self):
        self.protocol.send_frame(self.frame)

    def it_should_write_the_correct_bytestring(self):
        self.transport.write.assert_called_once_with(b'\x01\x00\x00\x00\x00\x00\x08\x00\x0A\x00\x28\x01/\x00\x00\xCE')


class WhenSendingQueueDeclare(ProtocolContext):
    """queue.declare on channel 1 must serialise to the exact wire bytes."""

    def given_a_method_to_send(self):
        self.method = spec.QueueDeclare(
            0, 'a', False, False, False, True, False, {})

    def when_I_send_the_method(self):
        # Exercises send_method (which wraps the method in a frame itself)
        # rather than send_frame.
        self.protocol.send_method(1, self.method)

    def it_should_write_the_correct_bytestring(self):
        self.transport.write.assert_called_once_with(b'\x01\x00\x01\x00\x00\x00\x0D\x00\x32\x00\x0A\x00\x00\x01a\x08\x00\x00\x00\x00\xCE')
given_a_content_header_frame(self): 107 | payload = message.ContentHeaderPayload(50, 100, [amqptypes.Octet(3), None, amqptypes.Table({'some': 'value'})]) 108 | self.frame = frames.ContentHeaderFrame(1, payload) 109 | 110 | def when_I_send_the_frame(self): 111 | self.protocol.send_frame(self.frame) 112 | 113 | def it_should_write_the_correct_bytestring(self): 114 | self.transport.write.assert_called_once_with( 115 | b'\x02\x00\x01\x00\x00\x00\x22' # regular frame header 116 | b'\x00\x32\x00\x00' # class id 50; weight is always 0 117 | b'\x00\x00\x00\x00\x00\x00\x00\x64' # body length 100 118 | b'\xA0\x00' # property_flags 0b1010000000000000 119 | b'\x03\x00\x00\x00\x0F\x04someS\x00\x00\x00\x05value' # property list 120 | b'\xCE') 121 | 122 | 123 | class WhenAContentHeaderArrives(MockDispatcherContext): 124 | def given_a_content_header_frame(self): 125 | self.raw = ( 126 | b'\x02\x00\x01\x00\x00\x00\x25' # regular frame header 127 | b'\x00\x32\x00\x00' # class id 50; weight is always 0 128 | b'\x00\x00\x00\x00\x00\x00\x00\x64' # body length 100 129 | b'\xA0\x00' # property_flags 0b1010000000000000 130 | b'\x03yes\x00\x00\x00\x0F\x04someS\x00\x00\x00\x05value' # property list 131 | b'\xCE') 132 | 133 | expected_payload = message.ContentHeaderPayload(50, 100, [ 134 | amqptypes.ShortStr('yes'), None, amqptypes.Table({'some': 'value'}), 135 | None, None, None, None, None, None, None, None, None, None]) 136 | self.expected_frame = frames.ContentHeaderFrame(1, expected_payload) 137 | 138 | def when_the_frame_arrives(self): 139 | self.protocol.data_received(self.raw) 140 | self.tick() 141 | 142 | def it_should_deserialise_it_to_a_ContentHeaderFrame(self): 143 | self.dispatcher.dispatch.assert_called_once_with(self.expected_frame) 144 | 145 | 146 | class WhenBasicGetOKArrives(MockDispatcherContext): 147 | def given_a_frame(self): 148 | self.raw = ( 149 | b'\x01\x00\x01\x00\x00\x00\x22' # type, channel, size 150 | b'\x00\x3C\x00\x47' # 60, 71 151 | 
b'\x00\x00\x00\x00\x00\x00\x00\x01' # delivery tag 152 | b'\x00' # not redelivered 153 | b'\x08exchange' 154 | b'\x07routing' 155 | b'\x00\x00\x00\x00' # no more messages 156 | b'\xCE') 157 | 158 | expected_method = spec.BasicGetOK(1, False, 'exchange', 'routing', 0) 159 | self.expected_frame = frames.MethodFrame(1, expected_method) 160 | 161 | def when_the_frame_arrives(self): 162 | self.protocol.data_received(self.raw) 163 | self.tick() 164 | 165 | def it_should_deserialise_it_to_the_correct_method(self): 166 | self.dispatcher.dispatch.assert_called_once_with(self.expected_frame) 167 | -------------------------------------------------------------------------------- /test/protocol_tests.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | import contexts 3 | import asynqp 4 | from asynqp import spec 5 | from asynqp import protocol 6 | from asynqp.exceptions import ConnectionLostError 7 | from .base_contexts import MockDispatcherContext, MockServerContext 8 | from .util import testing_exception_handler 9 | 10 | 11 | class WhenInitiatingProceedings(MockServerContext): 12 | def when_i_send_the_protocol_header(self): 13 | self.protocol.send_protocol_header() 14 | 15 | def it_should_write_the_correct_header(self): 16 | self.server.should_have_received_bytes(b'AMQP\x00\x00\x09\x01') 17 | 18 | 19 | class WhenAWholeFrameArrives(MockDispatcherContext): 20 | def establish_the_frame(self): 21 | self.raw = b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE' 22 | method = spec.ConnectionOpenOK('') 23 | self.expected_frame = asynqp.frames.MethodFrame(0, method) 24 | self.protocol.heartbeat_monitor = mock.Mock(spec=protocol.HeartbeatMonitor) 25 | 26 | def because_the_whole_frame_arrives(self): 27 | self.protocol.data_received(self.raw) 28 | self.tick() 29 | 30 | def it_should_dispatch_the_method(self): 31 | self.dispatcher.dispatch.assert_called_once_with(self.expected_frame) 32 | 33 | def 
it_should_reset_the_heartbeat_timeout(self): 34 | assert self.protocol.heartbeat_monitor.heartbeat_received.called 35 | 36 | 37 | class WhenAFrameDoesNotEndInFrameEnd(MockServerContext): 38 | def establish_the_bad_frame(self): 39 | self.raw = b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCD' 40 | 41 | def because_the_bad_frame_arrives(self): 42 | self.exception = contexts.catch(self.server.send_bytes, self.raw) 43 | 44 | def it_MUST_close_the_connection(self): 45 | assert self.transport.closed 46 | 47 | def it_should_raise_an_exception(self): 48 | assert isinstance(self.exception, asynqp.AMQPError) 49 | 50 | 51 | class WhenHalfAFrameArrives(MockDispatcherContext): 52 | @classmethod 53 | def examples_of_incomplete_frames(cls): 54 | yield b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00' # cut off half way through the payload 55 | yield b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00' # cut off right before the frame end byte 56 | yield b'\x01\x00' # cut off before the end of the header 57 | 58 | def because_some_of_a_frame_arrives(self, raw): 59 | self.protocol.data_received(raw) 60 | self.tick() 61 | 62 | def it_should_not_dispatch_the_method_yet(self): 63 | assert not self.dispatcher.dispatch.called 64 | 65 | 66 | class WhenAFrameArrivesInTwoParts(MockDispatcherContext): 67 | @classmethod 68 | def examples_of_broken_up_frames(cls): 69 | yield b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A', b'\x00\x29\x00\xCE' # cut off half way through the payload 70 | yield b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00', b'\xCE' # cut off right before the frame end byte 71 | yield b'\x01\x00', b'\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE' # cut off before the end of the header 72 | 73 | def establish_the_frame(self): 74 | method = spec.ConnectionOpenOK('') 75 | self.expected_frame = asynqp.frames.MethodFrame(0, method) 76 | 77 | def because_the_whole_frame_eventually_arrives(self, raw1, raw2): 78 | self.protocol.data_received(raw1) 79 | self.tick() 80 | 
self.protocol.data_received(raw2) 81 | self.tick() 82 | 83 | def it_should_dispatch_the_method(self): 84 | self.dispatcher.dispatch.assert_called_once_with(self.expected_frame) 85 | 86 | 87 | class WhenMoreThanAWholeFrameArrives(MockDispatcherContext): 88 | def establish_the_frame(self): 89 | self.raw = b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE\x01\x00\x00\x00\x00\x00\x05\x00\x0A' 90 | method = spec.ConnectionOpenOK('') 91 | self.expected_frame = asynqp.frames.MethodFrame(0, method) 92 | 93 | def because_more_than_a_whole_frame_arrives(self): 94 | self.protocol.data_received(self.raw) 95 | self.tick() 96 | 97 | def it_should_dispatch_the_method_once(self): 98 | self.dispatcher.dispatch.assert_called_once_with(self.expected_frame) 99 | 100 | 101 | class WhenTwoFramesArrive(MockDispatcherContext): 102 | def establish_the_frame(self): 103 | self.raw = b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE' 104 | method = spec.ConnectionOpenOK('') 105 | self.expected_frame = asynqp.frames.MethodFrame(0, method) 106 | 107 | def because_more_than_a_whole_frame_arrives(self): 108 | self.protocol.data_received(self.raw) 109 | self.tick() 110 | 111 | def it_should_dispatch_the_method_twice(self): 112 | self.dispatcher.dispatch.assert_has_calls([mock.call(self.expected_frame), mock.call(self.expected_frame)]) 113 | 114 | 115 | class WhenTwoFramesArrivePiecemeal(MockDispatcherContext): 116 | @classmethod 117 | def examples_of_broken_up_frames(cls): 118 | yield b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE', b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE' 119 | yield b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00', b'\x29\x00\xCE' 120 | yield b'\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE\x01\x00', b'\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE' 121 | yield b'\x01', 
b'\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE\x01\x00\x00\x00\x00\x00\x05\x00\x0A\x00\x29\x00\xCE' 122 | yield b'\x01', b'\x00\x00\x00\x00\x00\x05\x00', b'\x0A\x00\x29\x00', b'\xCE\x01\x00\x00\x00\x00\x00\x05\x00', b'\x0A\x00\x29\x00\xCE', b'' 123 | 124 | def establish_what_we_expected(self): 125 | method = spec.ConnectionOpenOK('') 126 | self.expected_frame = asynqp.frames.MethodFrame(0, method) 127 | 128 | def because_two_frames_arrive_in_bits(self, fragments): 129 | for fragment in fragments: 130 | self.protocol.data_received(fragment) 131 | self.tick() 132 | 133 | def it_should_dispatch_the_method_twice(self): 134 | self.dispatcher.dispatch.assert_has_calls([mock.call(self.expected_frame), mock.call(self.expected_frame)]) 135 | 136 | 137 | class WhenTheConnectionIsLost(MockServerContext): 138 | def given_an_exception_handler(self): 139 | self.connection_lost_error_raised = False 140 | self.loop.set_exception_handler(self.exception_handler) 141 | 142 | def exception_handler(self, loop, context): 143 | exception = context.get('exception') 144 | if type(exception) is ConnectionLostError: 145 | self.connection_lost_error_raised = True 146 | else: 147 | self.loop.default_exception_handler(context) 148 | 149 | def when_the_connection_is_closed(self): 150 | self.loop.call_soon(self.protocol.connection_lost, Exception) 151 | self.tick() 152 | 153 | def it_should_raise_a_connection_lost_error(self): 154 | assert self.connection_lost_error_raised is True 155 | 156 | def cleanup(self): 157 | self.loop.set_exception_handler(testing_exception_handler) 158 | -------------------------------------------------------------------------------- /test/serialisation_tests.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | import contexts 3 | from datetime import datetime, timezone, timedelta 4 | from asynqp import serialisation, AMQPError 5 | 6 | 7 | class WhenParsingATable: 8 | @classmethod 9 | def 
examples_of_tables(self): 10 | yield b"\x00\x00\x00\x00", {} 11 | yield b"\x00\x00\x00\x0E\x04key1t\x00\x04key2t\x01", {'key1': False, 'key2': True} 12 | yield b"\x00\x00\x00\x06\x03keyb\xff", {'key': -1} 13 | yield b"\x00\x00\x00\x07\x03keys\xff\xff", {'key': -1} 14 | yield b"\x00\x00\x00\x09\x03keyI\xff\xff\xff\xff", {'key': -1} 15 | yield b"\x00\x00\x00\x0C\x03keyl\xff\xff\xff\xff\xff\xff\xff\xff", {'key': -1} 16 | yield b"\x00\x00\x00\x05\x03keyV", {'key': None} 17 | yield b"\x00\x00\x00\x05\x03keyA\x00\x00\x00\x00", {'key': []} 18 | yield b"\x00\x00\x00\x0C\x03keyx\x00\x00\x00\x04\x00\x01\x02\x03", {'key': b"\x00\x01\x02\x03"} 19 | yield b"\x00\x00\x00\x0E\x03keyS\x00\x00\x00\x05hello", {'key': 'hello'} 20 | yield b"\x00\x00\x00\x16\x03keyF\x00\x00\x00\x0D\x0Aanotherkeyt\x00", {'key': {'anotherkey': False}} 21 | 22 | def because_we_read_the_table(self, bytes, expected): 23 | self.result = serialisation.read_table(BytesIO(bytes)) 24 | 25 | def it_should_return_the_table(self, bytes, expected): 26 | assert self.result == expected 27 | 28 | 29 | class WhenPackingAndUnpackingATable: 30 | @classmethod 31 | def examples_of_tables(cls): 32 | yield {'a': (1 << 16), 'b': (1 << 15)} 33 | yield {'c': 65535, 'd': -65535} 34 | yield {'e': -65536} 35 | yield {'f': -0x7FFFFFFF, 'g': 0x7FFFFFFF} 36 | yield {'x': b"\x01\x02"} 37 | yield {'x': []} 38 | yield {'l': None} 39 | yield {'l': 1.0} 40 | 41 | def because_we_pack_and_unpack_the_table(self, table): 42 | self.result = serialisation.read_table(BytesIO(serialisation.pack_table(table))) 43 | 44 | def it_should_return_the_table(self, table): 45 | assert self.result == table 46 | 47 | 48 | class WhenParsingAHugeTable: 49 | @classmethod 50 | def examples_of_huge_tables(self): 51 | # That would be -1 for an signed int 52 | yield b"\xFF\xFF\xFF\xFF\xFF" 53 | 54 | def because_we_read_the_table(self, bytes): 55 | # We expect the serialisation to read over the bounds, but only if it is unsigned 56 | self.exception = 
contexts.catch(serialisation.read_table, BytesIO(bytes)) 57 | 58 | def it_should_throw_an_AMQPError(self): 59 | assert isinstance(self.exception, AMQPError) 60 | 61 | 62 | class WhenParsingABadTable: 63 | @classmethod 64 | def examples_of_bad_tables(self): 65 | yield b"\x00\x00\x00\x0F\x04key1t\x00\x04key2t\x01" # length too long 66 | yield b"\x00\x00\x00\x06\x04key1X" # bad value type code 67 | 68 | def because_we_read_the_table(self, bytes): 69 | self.exception = contexts.catch(serialisation.read_table, BytesIO(bytes)) 70 | 71 | def it_should_throw_an_AMQPError(self): 72 | assert isinstance(self.exception, AMQPError) 73 | 74 | 75 | class WhenParsingAnArray: 76 | @classmethod 77 | def examples_of_arrays(self): 78 | yield b"\x00\x00\x00\x00", [] 79 | yield b"\x00\x00\x00\x04t\x00t\x01", [False, True] 80 | yield b"\x00\x00\x00\x02b\xff", [-1] 81 | yield b"\x00\x00\x00\x03s\xff\xff", [-1] 82 | yield b"\x00\x00\x00\x05I\xff\xff\xff\xff", [-1] 83 | yield b"\x00\x00\x00\x09l\xff\xff\xff\xff\xff\xff\xff\xff", [-1] 84 | yield b"\x00\x00\x00\x01V", [None] 85 | yield b"\x00\x00\x00\x05A\x00\x00\x00\x00", [[]] 86 | yield b"\x00\x00\x00\x09x\x00\x00\x00\x04\x00\x01\x02\x03", [b"\x00\x01\x02\x03"] 87 | yield b"\x00\x00\x00\x0AS\x00\x00\x00\x05hello", ['hello'] 88 | yield b"\x00\x00\x00\x12F\x00\x00\x00\x0D\x0Aanotherkeyt\x00", [{'anotherkey': False}] 89 | 90 | def because_we_read_the_array(self, buffer, expected): 91 | self.result = serialisation.read_array(BytesIO(buffer)) 92 | 93 | def it_should_return_the_array(self, buffer, expected): 94 | assert self.result == expected 95 | 96 | 97 | class WhenParsingALongString: 98 | def because_we_read_a_long_string(self): 99 | self.result = serialisation.read_long_string(BytesIO(b"\x00\x00\x00\x05hello")) 100 | 101 | def it_should_return_the_string(self): 102 | assert self.result == 'hello' 103 | 104 | 105 | class WhenParsingABadLongString: 106 | def because_we_read_a_bad_long_string(self): 107 | self.exception = 
contexts.catch(serialisation.read_long_string, BytesIO(b"\x00\x00\x00\x10hello")) # length too long 108 | 109 | def it_should_throw_an_AMQPError(self): 110 | assert isinstance(self.exception, AMQPError) 111 | 112 | 113 | class WhenPackingBools: 114 | @classmethod 115 | def examples_of_bools(self): 116 | yield [False], b"\x00" 117 | yield [True], b"\x01" 118 | yield [True, False, True], b'\x05' 119 | yield [True, False], b'\x01' 120 | yield [True, True, True, True, True, True, True, True], b'\xFF' 121 | 122 | def because_I_pack_them(self, bools, expected): 123 | self.result = serialisation.pack_bools(*bools) 124 | 125 | def it_should_pack_them_correctly(self, bools, expected): 126 | assert self.result == expected 127 | 128 | 129 | class WhenParsingATimestamp: 130 | @classmethod 131 | def examples_of_timestamps(cls): 132 | # The timestamp should be zero relative to epoch 133 | yield b'\x00\x00\x00\x00\x00\x00\x00\x00', datetime(1970, 1, 1, tzinfo=timezone.utc) 134 | # And independent of the timezone 135 | yield b'\x00\x00\x00\x00\x00\x00\x00\x00', datetime(1970, 1, 1, 1, 30, tzinfo=timezone(timedelta(hours=1, minutes=30))) 136 | # And and increase by a millisecond 137 | yield b'\x00\x00\x00\x00\x00\x00\x00\x01', datetime(1970, 1, 1, microsecond=1000, tzinfo=timezone.utc) 138 | # Cannot validate, that it is unsigned, as it is 139 | # yield b'\x80\x00\x00\x00\x00\x00\x00\x00', datetime(1970, 1, 1, microsecond=1000, tzinfo=timezone.utc) 140 | 141 | def because_we_read_a_timestamp(self, binary, _): 142 | self.result = serialisation.read_timestamp(BytesIO(binary)) 143 | 144 | def it_should_read_it_correctly(self, _, expected): 145 | assert self.result == expected 146 | 147 | 148 | class WhenWritingATimestamp: 149 | @classmethod 150 | def examples_of_timestamps(cls): 151 | for encoded, timeval in WhenParsingATimestamp.examples_of_timestamps(): 152 | yield timeval, encoded 153 | 154 | def because_I_pack_them(self, timeval, _): 155 | self.result = 
serialisation.pack_timestamp(timeval) 156 | 157 | def it_should_pack_them_correctly(self, _, expected): 158 | assert self.result == expected 159 | 160 | 161 | class WhenPackingAndUnpackingATimestamp: 162 | # Ensure, we do not add some offset by the serialisation process 163 | @classmethod 164 | def examples_of_timestamps(cls): 165 | yield datetime(1970, 1, 1, tzinfo=timezone.utc) 166 | yield datetime(1979, 1, 1, tzinfo=timezone(timedelta(hours=1, minutes=30))) 167 | 168 | def because_I_pack_them(self, timeval): 169 | packed = serialisation.pack_timestamp(timeval) 170 | unpacked = serialisation.read_timestamp(BytesIO(packed)) 171 | self.result = unpacked - timeval 172 | 173 | def it_should_pack_them_correctly(self, timeval): 174 | assert abs(self.result.total_seconds()) < 1.0e-9 175 | -------------------------------------------------------------------------------- /test/util.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from contextlib import contextmanager 3 | from unittest import mock 4 | import asynqp.frames 5 | from asynqp import protocol 6 | from asynqp.exceptions import ConnectionLostError 7 | 8 | 9 | def testing_exception_handler(loop, context): 10 | ''' 11 | Hides the expected ``ConnectionClosedErrors`` and 12 | ``ConnectionLostErros`` during tests 13 | ''' 14 | exception = context.get('exception') 15 | if exception and isinstance(exception, ConnectionLostError): 16 | pass 17 | else: 18 | loop.default_exception_handler(context) 19 | 20 | 21 | class MockServer(object): 22 | def __init__(self, protocol, tick): 23 | self.protocol = protocol 24 | self.tick = tick 25 | self.data = [] 26 | 27 | def send_bytes(self, b): 28 | self.protocol.data_received(b) 29 | self.tick() 30 | 31 | def send_frame(self, frame): 32 | self.send_bytes(frame.serialise()) 33 | 34 | def send_method(self, channel_number, method): 35 | frame = asynqp.frames.MethodFrame(channel_number, method) 36 | self.send_frame(frame) 37 | 38 | 
def reset(self): 39 | self.data = [] 40 | 41 | def should_have_received_frames(self, expected_frames, any_order=False): 42 | results = (read(x) for x in self.data) 43 | frames = [x for x in results if x is not None] 44 | if any_order: 45 | for frame in expected_frames: 46 | assert frame in frames, "{} should have been in {}".format(frame, frames) 47 | else: 48 | expected_frames = tuple(expected_frames) 49 | assert expected_frames in windows(frames, len(expected_frames)), "{} should have been in {}".format(expected_frames, frames) 50 | 51 | def should_have_received_methods(self, channel_number, methods, any_order=False): 52 | frames = (asynqp.frames.MethodFrame(channel_number, m) for m in methods) 53 | self.should_have_received_frames(frames, any_order) 54 | 55 | def should_have_received_frame(self, expected_frame): 56 | self.should_have_received_frames([expected_frame], any_order=True) 57 | 58 | def should_have_received_method(self, channel_number, method): 59 | self.should_have_received_methods(channel_number, [method], any_order=True) 60 | 61 | def should_not_have_received_method(self, channel_number, method): 62 | results = (read(x) for x in self.data) 63 | frames = [x for x in results if x is not None] 64 | 65 | frame = asynqp.frames.MethodFrame(channel_number, method) 66 | assert frame not in frames, "{} should not have been in {}".format(frame, frames) 67 | 68 | def should_not_have_received_any(self): 69 | assert not self.data, "{} should have been empty".format(self.data) 70 | 71 | def should_have_received_bytes(self, b): 72 | assert b in self.data 73 | 74 | 75 | def read(data): 76 | if data == b'AMQP\x00\x00\x09\x01': 77 | return 78 | 79 | result = protocol.FrameReader().read_frame(data) 80 | if result is None: 81 | return 82 | return result[0] 83 | 84 | 85 | def windows(l, size): 86 | return zip(*[l[x:] for x in range(size)]) 87 | 88 | 89 | class FakeTransport(object): 90 | def __init__(self, server): 91 | self.server = server 92 | self.closed = False 93 | 
94 | def write(self, data): 95 | self.server.data.append(data) 96 | 97 | def close(self): 98 | self.closed = True 99 | 100 | 101 | def any(cls): 102 | class _any(cls): 103 | def __init__(self): 104 | pass 105 | 106 | def __eq__(self, other): 107 | return isinstance(other, cls) 108 | return _any() 109 | 110 | 111 | @contextmanager 112 | def silence_expected_destroy_pending_log(expected_coro_name=''): 113 | real_async = asyncio.ensure_future 114 | 115 | def async_wrapper(*args, **kwargs): 116 | t = real_async(*args, **kwargs) 117 | if expected_coro_name in repr(t): 118 | t._log_destroy_pending = False 119 | return t 120 | 121 | with mock.patch.object(asyncio, 'ensure_future', async_wrapper): 122 | yield 123 | 124 | 125 | def run_briefly(loop): 126 | @asyncio.coroutine 127 | def once(): 128 | pass 129 | gen = once() 130 | t = loop.create_task(gen) 131 | # Don't log a warning if the task is not done after run_until_complete(). 132 | # It occurs if the loop is stopped or if a task raises a BaseException. 133 | t._log_destroy_pending = False 134 | try: 135 | loop.run_until_complete(t) 136 | finally: 137 | gen.close() 138 | --------------------------------------------------------------------------------