├── .codeclimate.yml ├── .gitignore ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docs ├── Makefile ├── api_consumer.rst ├── api_data.rst ├── api_smart_consumer.rst ├── api_testing.rst ├── cli.rst ├── conf.py ├── configuration.rst ├── consumer.rst ├── consumer_howto.rst ├── example_config.rst ├── history.rst └── index.rst ├── example.yaml ├── examples.py ├── init.d └── rejected ├── rejected ├── __init__.py ├── consumer.py ├── controller.py ├── data.py ├── log.py ├── mcp.py ├── mixins.py ├── process.py ├── state.py ├── statsd.py ├── testing.py └── utils.py ├── requires ├── development.txt ├── installation.txt ├── rtd.txt └── testing.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── mocks.py ├── test_consumer.py ├── test_data.py ├── test_mcp.py ├── test_percentile.py ├── test_process.py ├── test_state.py ├── test_statsd.py ├── test_testing.py └── test_utils.py └── utils └── test_generator.py /.codeclimate.yml: -------------------------------------------------------------------------------- 1 | languages: 2 | Python: true 3 | exclude_paths: 4 | - docs/* 5 | - tests/* 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.egg-info 3 | .coverage 4 | .idea 5 | .DS_Store 6 | build 7 | dist 8 | tests/cover 9 | cover 10 | atlassian-ide-plugin.xml 11 | cli.py 12 | docs/_build 13 | coverage.xml 14 | reports 15 | env* 16 | venv 17 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: python 3 | dist: xenial 4 | python: 5 | - 2.7 6 | - 3.5 7 | - 3.6 8 | - 3.7 9 | - 3.8 10 | - 3.9 11 | install: 12 | - pip install -r requires/development.txt 13 | - pip install -r requires/testing.txt 14 | - python setup.py develop 15 | script: 16 | - flake8 17 | - nosetests 18 | 
after_success: 19 | - codecov 20 | deploy: 21 | distributions: sdist bdist_wheel 22 | provider: pypi 23 | user: crad 24 | on: 25 | python: 3.7 26 | tags: true 27 | all_branches: true 28 | password: 29 | secure: "QNndN99rD5boB/Sg3I0CzjkFUF1JmGrsQKZ7ONiA+obUWQDqOmggUoPEs1zN8xIExDcM4tPhlCQX0QiYJrKdLQwWiClvKo1wpYUxVm0s/W8SqvhV3IK9VxhMrbZUkmksO48TH4YKav06rEkVxke9g3U92XUJZ6cRAnYUKrjMYaQ=" 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2009-2017, Gavin M. Roy 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | * Neither the name of the rejected project nor the names of its 13 | contributors may be used to endorse or promote products derived from this 14 | software without specific prior written permission. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 17 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 18 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
19 | IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 20 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 21 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 23 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 24 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 25 | ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.rst 3 | include requires/testing.txt 4 | include requires/installation.txt 5 | include requires/development.txt 6 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Rejected 2 | ======== 3 | 4 | Rejected is a AMQP consumer daemon and message processing framework. It allows 5 | for rapid development of message processing consumers by handling all of the 6 | core functionality of communicating with RabbitMQ and management of consumer 7 | processes. 8 | 9 | Rejected runs as a master process with multiple consumer configurations that are 10 | each run it an isolated process. It has the ability to collect statistical 11 | data from the consumer processes and report on it. 12 | 13 | Rejected supports Python 2.7 and 3.4+. 
14 | 15 | |Version| |Status| |Coverage| |License| 16 | 17 | Features 18 | -------- 19 | 20 | - Automatic exception handling including connection management and consumer restarting 21 | - Smart consumer classes that can automatically decode and deserialize message bodies based upon message headers 22 | - Metrics logging and submission to statsd and InfluxDB 23 | - Built-in profiling of consumer code 24 | - Ability to write asynchronous code in consumers allowing for parallel communication with external resources 25 | 26 | Documentation 27 | ------------- 28 | 29 | https://rejected.readthedocs.io 30 | 31 | Example Consumers 32 | ----------------- 33 | .. code:: python 34 | 35 | from rejected import consumer 36 | import logging 37 | 38 | LOGGER = logging.getLogger(__name__) 39 | 40 | 41 | class Test(consumer.Consumer): 42 | 43 | def process(self, message): 44 | LOGGER.debug('In Test.process: %s' % message.body) 45 | 46 | Async Consumer 47 | ^^^^^^^^^^^^^^ 48 | To make a consumer async, you can decorate the 49 | `Consumer.prepare `_ 50 | and `Consumer.process `_ 51 | methods using Tornado's 52 | `@gen.coroutine `_. 53 | Asynchronous consumers do not allow for concurrent processing multiple messages in the same process, but 54 | rather allow you to use asynchronous clients like 55 | `Tornado's `_ 56 | `AsyncHTTPClient `_ and the 57 | `Queries `_ 58 | PostgreSQL library to perform parallel tasks using coroutines when processing a single message. 59 | 60 | .. 
code:: python 61 | 62 | import logging 63 | 64 | from rejected import consumer 65 | 66 | from tornado import gen 67 | from tornado import httpclient 68 | 69 | 70 | class AsyncExampleConsumer(consumer.Consumer): 71 | 72 | @gen.coroutine 73 | def process(self): 74 | LOGGER.debug('Message: %r', self.body) 75 | http_client = httpclient.AsyncHTTPClient() 76 | results = yield [http_client.fetch('http://www.github.com'), 77 | http_client.fetch('http://www.reddit.com')] 78 | LOGGER.info('Length: %r', [len(r.body) for r in results]) 79 | 80 | 81 | Example Configuration 82 | --------------------- 83 | .. code:: yaml 84 | 85 | %YAML 1.2 86 | --- 87 | Application: 88 | poll_interval: 10.0 89 | stats: 90 | log: True 91 | influxdb: 92 | enabled: True 93 | scheme: http 94 | host: localhost 95 | port: 8086 96 | user: username 97 | password: password 98 | database: dbname 99 | statsd: 100 | enabled: True 101 | host: localhost 102 | port: 8125 103 | prefix: applications.rejected 104 | Connections: 105 | rabbitmq: 106 | host: localhost 107 | port: 5672 108 | user: guest 109 | pass: guest 110 | ssl: False 111 | vhost: / 112 | heartbeat_interval: 300 113 | Consumers: 114 | example: 115 | consumer: rejected.example.Consumer 116 | sentry_dsn: https://[YOUR-SENTRY-DSN] 117 | connections: 118 | - name: rabbitmq1 119 | consume: True 120 | drop_exchange: dlxname 121 | qty: 2 122 | queue: generated_messages 123 | qos_prefetch: 100 124 | ack: True 125 | max_errors: 100 126 | config: 127 | foo: True 128 | bar: baz 129 | 130 | Daemon: 131 | user: rejected 132 | group: daemon 133 | pidfile: /var/run/rejected/example.%(pid)s.pid 134 | 135 | Logging: 136 | version: 1 137 | formatters: 138 | verbose: 139 | format: "%(levelname) -10s %(asctime)s %(process)-6d %(processName) -25s %(name) -20s %(funcName) -25s: %(message)s" 140 | datefmt: "%Y-%m-%d %H:%M:%S" 141 | verbose_correlation: 142 | format: "%(levelname) -10s %(asctime)s %(process)-6d %(processName) -25s %(name) -20s %(funcName) -25s: 
%(message)s {CID %(correlation_id)s}" 143 | datefmt: "%Y-%m-%d %H:%M:%S" 144 | syslog: 145 | format: "%(levelname)s %(name)s.%(funcName)s: %(message)s" 146 | syslog_correlation: 147 | format: "%(levelname)s %(name)s.%(funcName)s: %(message)s {CID %(correlation_id)s)" 148 | filters: 149 | correlation: 150 | '()': rejected.log.CorrelationFilter 151 | 'exists': True 152 | no_correlation: 153 | '()': rejected.log.CorrelationFilter 154 | 'exists': False 155 | handlers: 156 | console: 157 | class: logging.StreamHandler 158 | formatter: verbose 159 | debug_only: false 160 | filters: [no_correlation] 161 | console_correlation: 162 | class: logging.StreamHandler 163 | formatter: verbose_correlation 164 | debug_only: false 165 | filters: [correlation] 166 | syslog: 167 | class: logging.handlers.SysLogHandler 168 | facility: daemon 169 | address: /var/run/syslog 170 | formatter: syslog 171 | filters: [no_correlation] 172 | syslog_correlation: 173 | class: logging.handlers.SysLogHandler 174 | facility: daemon 175 | address: /var/run/syslog 176 | formatter: syslog 177 | filters: [correlation] 178 | loggers: 179 | helper: 180 | level: INFO 181 | propagate: true 182 | handlers: [console, console_correlation, syslog, syslog_correlation] 183 | rejected: 184 | level: INFO 185 | propagate: true 186 | handlers: [console, console_correlation, syslog, syslog_correlation] 187 | tornado: 188 | level: INFO 189 | propagate: true 190 | handlers: [console, console_correlation, syslog, syslog_correlation] 191 | disable_existing_loggers: true 192 | incremental: false 193 | 194 | Version History 195 | --------------- 196 | Available at https://rejected.readthedocs.org/en/latest/history.html 197 | 198 | .. |Version| image:: https://img.shields.io/pypi/v/rejected.svg? 199 | :target: https://pypi.python.org/pypi/rejected 200 | 201 | .. |Status| image:: https://img.shields.io/travis/gmr/rejected.svg? 202 | :target: https://travis-ci.org/gmr/rejected 203 | 204 | .. 
|Coverage| image:: https://img.shields.io/codecov/c/github/gmr/rejected.svg? 205 | :target: https://codecov.io/github/gmr/rejected?branch=master 206 | 207 | .. |License| image:: https://img.shields.io/pypi/l/rejected.svg? 208 | :target: https://rejected.readthedocs.org 209 | 210 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/rejected.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/rejected.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/rejected" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/rejected" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 
112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/api_consumer.rst: -------------------------------------------------------------------------------- 1 | Consumer 2 | ======== 3 | 4 | .. autoclass:: rejected.consumer.Consumer 5 | :members: 6 | :inherited-members: 7 | :exclude-members: execute, log_exception, on_confirmation, require_setting, set_channel 8 | 9 | .. autoclass:: rejected.consumer.PublishingConsumer 10 | :members: 11 | :inherited-members: 12 | :exclude-members: execute, log_exception, on_confirmation, require_setting, set_channel 13 | -------------------------------------------------------------------------------- /docs/api_data.rst: -------------------------------------------------------------------------------- 1 | Rejected Data Objects 2 | ===================== 3 | 4 | .. automodule:: rejected.data 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/api_smart_consumer.rst: -------------------------------------------------------------------------------- 1 | SmartConsumer 2 | ============= 3 | 4 | .. autoclass:: rejected.consumer.SmartConsumer 5 | :members: 6 | :inherited-members: 7 | :exclude-members: execute, log_exception, on_confirmation, require_setting, set_channel 8 | 9 | .. 
autoclass:: rejected.consumer.SmartPublishingConsumer 10 | :members: 11 | :inherited-members: 12 | :exclude-members: execute, log_exception, on_confirmation, require_setting, set_channel 13 | -------------------------------------------------------------------------------- /docs/api_testing.rst: -------------------------------------------------------------------------------- 1 | Testing Support 2 | =============== 3 | 4 | .. automodule:: rejected.testing 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/cli.rst: -------------------------------------------------------------------------------- 1 | Command-Line Options 2 | ==================== 3 | The rejected command line application allows you to spawn the rejected process 4 | as a daemon. Additionally it has options for running interactively (``-f``), which 5 | along with the ``-o`` switch for specifying a single consumer to run and ``-q`` 6 | to specify quantity, makes for easier debugging. 7 | 8 | If you specify ``-P /path/to/write/data/to``, rejected will automatically enable 9 | :py:mod:`cProfile`, writing the profiling data to the path specified. This can 10 | be used in conjunction with graphviz to diagram code execution and hotspots. 11 | 12 | Help 13 | ---- 14 | .. code:: 15 | 16 | usage: rejected [-h] [-c CONFIG] [-f] [-P PROFILE] [-o CONSUMER] 17 | [-p PREPEND_PATH] [-q QUANTITY] 18 | 19 | RabbitMQ consumer framework 20 | 21 | optional arguments: 22 | -h, --help show this help message and exit 23 | -c CONFIG, --config CONFIG 24 | Path to the configuration file 25 | -f, --foreground Run the application interactively 26 | -P PROFILE, --profile PROFILE 27 | Profile the consumer modules, specifying the output 28 | directory. 29 | -o CONSUMER, --only CONSUMER 30 | Only run the consumer specified 31 | -p PREPEND_PATH, --prepend-path PREPEND_PATH 32 | Prepend the python path with the value. 
33 | -q QUANTITY, --qty QUANTITY 34 | Run the specified quanty of consumer processes when 35 | used in conjunction with -o 36 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import sys 4 | sys.path.insert(0, os.path.abspath('..')) 5 | 6 | import rejected # noqa E402 7 | 8 | release = rejected.__version__ 9 | version = '.'.join(release.split('.')[0:1]) 10 | 11 | extensions = [ 12 | 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode', 13 | 'sphinx.ext.autosummary', 'sphinx.ext.intersphinx' 14 | ] 15 | 16 | templates_path = ['_templates'] 17 | source_suffix = '.rst' 18 | master_doc = 'index' 19 | 20 | project = 'rejected' 21 | copyright = '2009-2022, Gavin M. Roy' 22 | 23 | exclude_patterns = ['_build'] 24 | pygments_style = 'sphinx' 25 | 26 | intersphinx_mapping = { 27 | 'python': ('https://docs.python.org/3/', None), 28 | 'pika': ('https://pika.readthedocs.io/en/latest/', None), 29 | 'raven': ('https://raven.readthedocs.io/en/latest/', None), 30 | 'tornado': ('http://www.tornadoweb.org/en/latest/', None) 31 | } 32 | 33 | html_theme = 'default' 34 | html_static_path = ['_static'] 35 | htmlhelp_basename = 'rejecteddoc' 36 | 37 | latex_elements = {} 38 | 39 | latex_documents = [ 40 | ('index', 'rejected.tex', u'rejected Documentation', u'Gavin M. Roy', 41 | 'manual'), 42 | ] 43 | 44 | man_pages = [('index', 'rejected', u'rejected Documentation', 45 | [u'Gavin M. Roy'], 1)] 46 | 47 | texinfo_documents = [ 48 | ('index', 'rejected', u'rejected Documentation', u'Gavin M. 
Roy', 49 | 'rejected', 'One line description of project.', 'Miscellaneous'), 50 | ] 51 | -------------------------------------------------------------------------------- /docs/configuration.rst: -------------------------------------------------------------------------------- 1 | Configuration File Syntax 2 | ========================= 3 | The rejected configuration uses `YAML `_ as the markup language. 4 | YAML's format, like Python code is whitespace dependent for control structure in 5 | blocks. If you're having problems with your rejected configuration, the first 6 | thing you should do is ensure that the YAML syntax is correct. `yamllint.com `_ 7 | is a good resource for validating that your configuration file can be parsed. 8 | 9 | The configuration file is split into three main sections: Application, Daemon, and Logging. 10 | 11 | The :ref:`example configuration ` file provides a good starting 12 | point for creating your own configuration file. 13 | 14 | .. _application: 15 | 16 | Application 17 | ----------- 18 | The application section of the configuration is broken down into multiple top-level options: 19 | 20 | +---------------+-----------------------------------------------------------------------------------------+ 21 | | poll_interval | How often rejected should poll consumer processes for status in seconds (int/float) | 22 | +---------------+-----------------------------------------------------------------------------------------+ 23 | | sentry_dsn | If Sentry support is installed, optionally set a global DSN for all consumers (str) | 24 | +---------------+-----------------------------------------------------------------------------------------+ 25 | | `stats`_ | Enable and configure statsd metric submission (obj) | 26 | +---------------+-----------------------------------------------------------------------------------------+ 27 | | `Connections`_| A subsection with RabbitMQ connection information for consumers (obj) | 28 | 
+---------------+-----------------------------------------------------------------------------------------+ 29 | | `Consumers`_ | Where each consumer type is configured (obj) | 30 | +---------------+-----------------------------------------------------------------------------------------+ 31 | 32 | stats 33 | ^^^^^ 34 | +-------+----------------------------------------------------------------------------------------+ 35 | | stats | | 36 | +=======+===============+========================================================================+ 37 | | | log | Toggle top-level logging of consumer process stats (bool) | 38 | +-------+---------------+------------------------------------------------------------------------+ 39 | | | `influxdb`_ | Configure the submission of per-message measurements to InfluxDB (obj) | 40 | +-------+---------------+------------------------------------------------------------------------+ 41 | | | `statsd`_ | Configure the submission of per-message measurements to statsd (obj) | 42 | +-------+---------------+------------------------------------------------------------------------+ 43 | 44 | influxdb 45 | ^^^^^^^^ 46 | +------------------+------------------------------------------------------------------------------------------------------+ 47 | | stats > influxdb | | 48 | +==================+==========+===========================================================================================+ 49 | | | scheme | The scheme to use when submitting metrics to the InfluxDB server. Default: ``http`` (str) | 50 | +------------------+----------+-------------------------------------------------------------------------------------------+ 51 | | | host | The hostname or ip address of the InfluxDB server. Default: ``localhost`` (str) | 52 | +------------------+----------+-------------------------------------------------------------------------------------------+ 53 | | | port | The port of the influxdb server. 
Default: ``8086`` (int) | 54 | +------------------+----------+-------------------------------------------------------------------------------------------+ 55 | | | user | An optional username to use when submitting measurements. (str) | 56 | +------------------+----------+-------------------------------------------------------------------------------------------+ 57 | | | password | An optional password to use when submitting measurements. (str) | 58 | +------------------+----------+-------------------------------------------------------------------------------------------+ 59 | | | database | The InfluxDB database to submit measurements to. Default: ``rejected`` (str) | 60 | +------------------+----------+-------------------------------------------------------------------------------------------+ 61 | 62 | statsd 63 | ^^^^^^ 64 | +----------------+-------------------------------------------------------------------------------------------+ 65 | | stats > statsd | | 66 | +================+==================+========================================================================+ 67 | | | enabled | Toggle statsd reporting off and on (bool) | 68 | +----------------+------------------+------------------------------------------------------------------------+ 69 | | | prefix | An optional prefix to use when creating the statsd metric path (str) | 70 | +----------------+------------------+------------------------------------------------------------------------+ 71 | | | host | The hostname or ip address of the statsd server (str) | 72 | +----------------+------------------+------------------------------------------------------------------------+ 73 | | | port | The port of the statsd server. Default: ``8125`` (int) | 74 | +----------------+------------------+------------------------------------------------------------------------+ 75 | | | include_hostname | Include the hostname in the measurement path. 
Default: ``True`` (bool) | 76 | +----------------+------------------+------------------------------------------------------------------------+ 77 | | | tcp | Use TCP to connect to statsd (true/false). Default: ``false`` (str) | 78 | +----------------+------------------+------------------------------------------------------------------------+ 79 | 80 | Connections 81 | ^^^^^^^^^^^ 82 | Each RabbitMQ connection entry should be a nested object with a unique name with connection attributes. 83 | 84 | +-----------------+-------------------------------------------------------------------------------------+ 85 | | Connection Name | | 86 | +=================+=====================+===============================================================+ 87 | | | host | The hostname or ip address of the RabbitMQ server (str) | 88 | | +---------------------+---------------------------------------------------------------+ 89 | | | port | The port of the RabbitMQ server (int) | 90 | | +---------------------+---------------------------------------------------------------+ 91 | | | vhost | The virtual host to connect to (str) | 92 | | +---------------------+---------------------------------------------------------------+ 93 | | | user | The username to connect as (str) | 94 | | +---------------------+---------------------------------------------------------------+ 95 | | | pass | The password to use (str) | 96 | | +---------------------+---------------------------------------------------------------+ 97 | | | `ssl_options`_ | Optional: the SSL options for the `SSL connection socket`_ | 98 | | +---------------------+---------------------------------------------------------------+ 99 | | | heartbeat_interval | Optional: the AMQP heartbeat interval (int) default: 300 sec | 100 | +-----------------+---------------------+---------------------------------------------------------------+ 101 | 102 | ssl_options 103 | ^^^^^^^^^^^ 104 | 
+---------------------------+---------------------------------------------------------------------------------------------------------+ 105 | | Connections > ssl_options | | 106 | +===========================+==============+==========================================================================================+ 107 | | | ca_certs | The file path to the concatenated list of CA certificates (str) | 108 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 109 | | | ca_path | The directory path to the PEM formatted CA certificates (str) | 110 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 111 | | | ca_data | The PEM encoded CA certificates (str) | 112 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 113 | | | prototcol | The ssl `PROTOCOL_*`_ enum integer value. 
Default: ``2`` for enum ``PROTOCOL_TLS`` (int) | 114 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 115 | | | certfile | The file path to the PEM formatted certificate file (str) | 116 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 117 | | | keyfile | The file path to the certificate private key (str) | 118 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 119 | | | password | The password for decrypting the ``keyfile`` private key (str) | 120 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 121 | | | ciphers | The set of available ciphers in the OpenSSL cipher list format (str) | 122 | +---------------------------+--------------+------------------------------------------------------------------------------------------+ 123 | 124 | Consumers 125 | ^^^^^^^^^ 126 | Each consumer entry should be a nested object with a unique name with consumer attributes. 
127 | 128 | +---------------+-----------------------------------------------------------------------------------------------------------+ 129 | | Consumer Name | | 130 | +===============+=======================+===================================================================================+ 131 | | | consumer | The package.module.Class path to the consumer code (str) | 132 | | +-----------------------+-----------------------------------------------------------------------------------+ 133 | | | connections | The connections to connect to (list) - See `Consumer Connections`_ | 134 | | +-----------------------+-----------------------------------------------------------------------------------+ 135 | | | qty | The number of consumers per connection to run (int) | 136 | | +-----------------------+-----------------------------------------------------------------------------------+ 137 | | | queue | The RabbitMQ queue name to consume from (str) | 138 | | +-----------------------+-----------------------------------------------------------------------------------+ 139 | | | ack | Explicitly acknowledge messages (no_ack = not ack) (bool) | 140 | | +-----------------------+-----------------------------------------------------------------------------------+ 141 | | | max_errors | Number of errors encountered before restarting a consumer (int) | 142 | | +-----------------------+-----------------------------------------------------------------------------------+ 143 | | | sentry_dsn | If Sentry support is installed, set a consumer specific sentry DSN (str) | 144 | | +-----------------------+-----------------------------------------------------------------------------------+ 145 | | | drop_exchange | The exchange to publish a message to when it is dropped. If not specified, | 146 | | | | dropped messages are not republished anywhere. 
| 147 | | +-----------------------+-----------------------------------------------------------------------------------+ 148 | | | drop_invalid_messages | Drop a message if the type property doesn't match the specified message type (str)| 149 | | +-----------------------+-----------------------------------------------------------------------------------+ 150 | | | message_type | Used to validate the message type of a message before processing. This attribute | 151 | | | | can be set to a string that is matched against the AMQP message type or a list of | 152 | | | | acceptable message types. (str, array) | 153 | | +-----------------------+-----------------------------------------------------------------------------------+ 154 | | | error_exchange | The exchange to publish messages that raise | 155 | | | | :exc:`~rejected.consumer.ProcessingException` to (str) | 156 | | +-----------------------+-----------------------------------------------------------------------------------+ 157 | | | error_max_retry | The number of :exc:`~rejected.consumer.ProcessingException` raised on a message | 158 | | | | before a message is dropped. If not specified messages will never be dropped (int)| 159 | | +-----------------------+-----------------------------------------------------------------------------------+ 160 | | | influxdb_measurement | When using InfluxDB, the measurement name for per-message measurements. | 161 | | | | Defaults to the consumer name. (str) | 162 | | +-----------------------+-----------------------------------------------------------------------------------+ 163 | | | config | Free-form key-value configuration section for the consumer (obj) | 164 | +---------------+-----------------------+-----------------------------------------------------------------------------------+ 165 | 166 | Consumer Connections 167 | ^^^^^^^^^^^^^^^^^^^^ 168 | The consumer connections configuration allows for one or more connections to be 169 | made by a single consumer. 
This configuration section supports two formats. If 170 | a list of connection names are specified, the consumer will connect to and consume 171 | from the all of the specified connections. 172 | 173 | .. code:: yaml 174 | 175 | Consumer Name: 176 | connections: 177 | - connection1 178 | - connection2 179 | 180 | If the ``connections`` list include structured values, additional settings can be 181 | set. For example, you may want to consume from one RabbitMQ broker and publish to 182 | another, as is illustrated below: 183 | 184 | .. code:: yaml 185 | 186 | Consumer Name: 187 | connections: 188 | - name: connection1 189 | consume: True 190 | publisher_confirmation: False 191 | - name: connection2 192 | consume: False 193 | publisher_confirmation: True 194 | 195 | In the above example, the consumer will have two connections, ``connection1`` and 196 | ``connection2``. It will only consume from ``connection1`` but can publish 197 | messages ``connection2`` by specifying the connection name in the 198 | :py:meth:`~rejected.consumer.Consumer.publish_message` method. 199 | 200 | Structured Connections 201 | !!!!!!!!!!!!!!!!!!!!!! 202 | 203 | When specifying a structured consumer connection, the following attributes are 204 | available. 205 | 206 | +-----------------------------+---------------------------------------------------------------------------------------------+ 207 | | Consumer Name > connections | | 208 | +=============================+========================+====================================================================+ 209 | | | name | The connection name, as specified in the Connections section of | 210 | | | | the application configuration. | 211 | | +------------------------+--------------------------------------------------------------------+ 212 | | | consume | Specify if the connection should consume on the connection. 
(bool) | 213 | +-----------------------------+------------------------+--------------------------------------------------------------------+ 214 | | | publisher_confirmation | Enable publisher confirmations. (bool) | 215 | +-----------------------------+------------------------+--------------------------------------------------------------------+ 216 | 217 | .. _daemon: 218 | 219 | Daemon 220 | ------ 221 | This section contains the settings required to run the application as a daemon. They are as follows: 222 | 223 | +---------+---------------------------------------------------------------------------+ 224 | | user | The username to run as when the process is daemonized (bool) | 225 | +---------+---------------------------------------------------------------------------+ 226 | | group | Optional The group name to switch to when the process is daemonized (str) | 227 | +---------+---------------------------------------------------------------------------+ 228 | | pidfile | The pidfile to write when the process is daemonized (str) | 229 | +---------+---------------------------------------------------------------------------+ 230 | 231 | 232 | .. _logging: 233 | 234 | Logging 235 | ------- 236 | rejected uses :py:mod:`logging.config.dictConfig ` to create a flexible method for configuring the python standard logging module. If rejected is being run in Python 2.6, `logutils.dictconfig.dictConfig `_ is used instead. 237 | 238 | The following basic example illustrates all of the required sections in the dictConfig format, implemented in YAML: 239 | 240 | .. 
code:: yaml 241 | 242 | version: 1 243 | formatters: [] 244 | verbose: 245 | format: '%(levelname) -10s %(asctime)s %(process)-6d %(processName) -15s %(name) -10s %(funcName) -20s: %(message)s' 246 | datefmt: '%Y-%m-%d %H:%M:%S' 247 | handlers: 248 | console: 249 | class: logging.StreamHandler 250 | formatter: verbose 251 | debug_only: True 252 | loggers: 253 | rejected: 254 | handlers: [console] 255 | level: INFO 256 | propagate: true 257 | myconsumer: 258 | handlers: [console] 259 | level: DEBUG 260 | propagate: true 261 | disable_existing_loggers: true 262 | incremental: false 263 | 264 | .. NOTE:: 265 | The debug_only node of the Logging > handlers > console section is not part of the standard dictConfig format. Please see the :ref:`caveats` section below for more information. 266 | 267 | .. _caveats: 268 | 269 | Logging Caveats 270 | ^^^^^^^^^^^^^^^ 271 | In order to allow for customizable console output when running in the foreground and no console output when daemonized, a ``debug_only`` node has been added to the standard dictConfig format in the handler section. This method is evaluated when logging is configured and if present, it is removed prior to passing the dictionary to dictConfig if present. 272 | 273 | If the value is set to true and the application is not running in the foreground, the configuration for the handler and references to it will be removed from the configuration dictionary. 274 | 275 | Troubleshooting 276 | ^^^^^^^^^^^^^^^ 277 | If you find that your application is not logging anything or sending output to the terminal, ensure that you have created a logger section in your configuration for your consumer package. For example if your Consumer instance is named ``myconsumer.MyConsumer`` make sure there is a ``myconsumer`` logger in the logging configuration. 278 | 279 | .. _SSL connection socket: https://docs.python.org/3/library/ssl.html#ssl.wrap_socket 280 | .. 
_PROTOCOL_*: https://docs.python.org/3/library/ssl.html#ssl.SSLContext 281 | -------------------------------------------------------------------------------- /docs/consumer.rst: -------------------------------------------------------------------------------- 1 | .. _consumer_api: 2 | 3 | Consumer API 4 | ============ 5 | The :py:class:`Consumer ` and 6 | :py:class:`SmartConsumer ` classes to extend 7 | for consumer applications. 8 | 9 | While the :py:class:`Consumer ` class provides all 10 | the structure required for implementing a rejected consumer, 11 | the :py:class:`SmartConsumer ` adds 12 | functionality designed to make writing consumers even easier. When messages 13 | are received by consumers extending :py:class:`SmartConsumer `, 14 | if the message's ``content_type`` property contains one of the supported mime-types, 15 | the message body will automatically be deserialized, making the deserialized 16 | message body available via the ``body`` attribute. Additionally, should one of 17 | the supported ``content_encoding`` types (``gzip`` or ``bzip2``) be specified in the 18 | message's property, it will automatically be decoded. 19 | 20 | Message Type Validation 21 | ----------------------- 22 | In any of the consumer base classes, if the ``MESSAGE_TYPE`` attribute is set, 23 | the ``type`` property of incoming messages will be validated against when a message is 24 | received, checking for string equality against the ``MESSAGE_TYPE`` attribute. 25 | If they are not matched, the consumer will not process the message and will drop the 26 | message without an exception if the ``DROP_INVALID_MESSAGES`` attribute is set to 27 | ``True``. If it is ``False``, a :py:class:`~rejected.consumer.MessageException` is raised. 
28 | 29 | Republishing of Dropped Messages 30 | -------------------------------- 31 | If the consumer is configured by specifying ``DROP_EXCHANGE`` as an attribute of 32 | the consumer class or in the consumer configuration with the ``drop_exchange`` 33 | configuration variable, when a message is dropped, it is published to that 34 | exchange prior to the message being rejected in RabbitMQ. When the 35 | message is republished, four new values are added to the AMQP ``headers`` 36 | message property: ``X-Dropped-By``, ``X-Dropped-Reason``, ``X-Dropped-Timestamp``, 37 | ``X-Original-Exchange``. 38 | 39 | The ``X-Dropped-By`` header value contains the configured name of the 40 | consumer that dropped the message. ``X-Dropped-Reason`` contains the 41 | reason the message was dropped (eg invalid message type or maximum error 42 | count). ``X-Dropped-Timestamp`` value contains the ISO-8601 formatted 43 | timestamp of when the message was dropped. Finally, the 44 | ``X-Original-Exchange`` value contains the original exchange that the 45 | message was published to. 46 | 47 | Consumer Classes 48 | ---------------- 49 | .. toctree:: 50 | :glob: 51 | :maxdepth: 1 52 | 53 | api_consumer 54 | api_smart_consumer 55 | 56 | Exceptions 57 | ---------- 58 | There are three exception types that consumer applications should raise to handle 59 | problems that may arise when processing a message. When these exceptions are raised, 60 | rejected will reject the message delivery, letting RabbitMQ know that there was 61 | a failure. 62 | 63 | The :py:class:`ConsumerException ` should be 64 | raised when there is a problem in the consumer itself, such as inability to contact 65 | a database server or other resources. When a 66 | :py:class:`ConsumerException ` is raised, 67 | the message will be rejected *and* requeued, adding it back to the RabbitMQ it 68 | was delivered back to. 
Additionally, rejected keeps track of consumer exceptions 69 | and will shutdown the consumer process and start a new one once a consumer has 70 | exceeded its configured maximum error count within a ``60`` second window. The 71 | default maximum error count is ``5``. 72 | 73 | The :py:class:`MessageException ` should be 74 | raised when there is a problem with the message. When this exception is raised, 75 | the message will be rejected on the RabbitMQ server *without* requeue, discarding 76 | the message. This should be done when there is a problem with the message itself, 77 | such as a malformed payload or non-supported properties like ``content-type`` 78 | or ``type``. 79 | 80 | If a consumer raises a :exc:`~rejected.consumer.ProcessingException`, the 81 | message that was being processed will be republished to the exchange 82 | specified by the ``error`` exchange configuration value or the 83 | ``ERROR_EXCHANGE`` attribute of the consumer's class. The message will be 84 | published using the routing key that was last used for the message. The 85 | original message body and properties will be used and two additional 86 | header property values may be added: 87 | 88 | - ``X-Processing-Exception`` contains the string value of the exception that was 89 | raised, if specified. 90 | - ``X-Processing-Exceptions`` contains the quantity of processing exceptions 91 | that have been raised for the message. 92 | 93 | In combination with a queue that has ``x-message-ttl`` set 94 | and ``x-dead-letter-exchange`` that points to the original exchange for the 95 | queue the consumer is consuming off of, you can implement a delayed retry 96 | cycle for messages that are failing to process due to external resource or 97 | service issues. 
98 | 99 | If ``ERROR_MAX_RETRY`` is set on the class, the headers for each method 100 | will be inspected and if the value of ``X-Processing-Exceptions`` is 101 | greater than or equal to the ``ERROR_MAX_RETRY`` value, the message will 102 | be dropped. 103 | 104 | .. note:: If unhandled exceptions are raised by a consumer, they will be caught by rejected, logged, and turned into a :py:class:`ConsumerException `. 105 | 106 | .. autoclass:: rejected.consumer.RejectedException 107 | :members: 108 | 109 | .. autoclass:: rejected.consumer.ConsumerException 110 | :members: 111 | 112 | .. autoclass:: rejected.consumer.MessageException 113 | :members: 114 | 115 | .. autoclass:: rejected.consumer.ProcessingException 116 | :members: 117 | -------------------------------------------------------------------------------- /docs/consumer_howto.rst: -------------------------------------------------------------------------------- 1 | Consumer Examples 2 | ================= 3 | The following example illustrates a very simple consumer that simply logs each 4 | message body as it's received. 5 | 6 | .. code:: python 7 | 8 | from rejected import consumer 9 | import logging 10 | 11 | __version__ = '1.0.0' 12 | 13 | LOGGER = logging.getLogger(__name__) 14 | 15 | 16 | class ExampleConsumer(consumer.Consumer): 17 | 18 | def process(self): 19 | LOGGER.info(self.body) 20 | 21 | All interaction with RabbitMQ with regard to connection management and message 22 | handling, including acknowledgements and rejections are automatically handled 23 | for you. 24 | 25 | The ``__version__`` variable provides context in the rejected log files when 26 | consumers are started and can be useful for investigating consumer behaviors in 27 | production. 28 | 29 | In this next example, a contrived ``ExampleConsumer._connect_to_database`` method 30 | is added that will return ``False``. 
When ``ExampleConsumer.process`` evaluates 31 | if it could connect to the database and finds it can not, it will raise a 32 | :py:class:`rejected.consumer.ConsumerException` which will requeue the message 33 | in RabbitMQ and increment an error counter. When too many errors occur, rejected 34 | will automatically restart the consumer after a brief quiet period. For more 35 | information on these exceptions, check out the :ref:`consumer API documentation `. 36 | 37 | .. code:: python 38 | 39 | from rejected import consumer 40 | import logging 41 | 42 | __version__ = '1.0.0' 43 | 44 | LOGGER = logging.getLogger(__name__) 45 | 46 | 47 | class ExampleConsumer(consumer.Consumer): 48 | 49 | def _connect_to_database(self): 50 | return False 51 | 52 | def process(self): 53 | if not self._connect_to_database(): 54 | raise consumer.ConsumerException('Database error') 55 | 56 | LOGGER.info(self.body) 57 | 58 | Some consumers are also publishers. In this next example, the message body will 59 | be republished to a new exchange on the same RabbitMQ connection: 60 | 61 | .. code:: python 62 | 63 | from rejected import consumer 64 | import logging 65 | 66 | __version__ = '1.0.0' 67 | 68 | LOGGER = logging.getLogger(__name__) 69 | 70 | 71 | class ExampleConsumer(consumer.PublishingConsumer): 72 | 73 | def process(self): 74 | LOGGER.info(self.body) 75 | self.publish('new-exchange', 'routing-key', {}, self.body) 76 | 77 | Note that the previous example extends :py:class:`rejected.consumer.PublishingConsumer` 78 | instead of :py:class:`rejected.consumer.Consumer`. For more information about what 79 | base consumer classes exist, be sure to check out the :ref:`consumer API documentation `. 80 | -------------------------------------------------------------------------------- /docs/example_config.rst: -------------------------------------------------------------------------------- 1 | ..
_config_example: 2 | 3 | Configuration Example 4 | ===================== 5 | The following example will configure rejected to a consumer that connects to two 6 | different RabbitMQ servers, running two instances per connection, for a total 7 | of four consumer processes. It will consume from a queue named ``generated_messages`` 8 | and provides configuration for the consumer code itself that would consist of a dict 9 | with the keys ``foo`` and ``bar``. 10 | 11 | .. code:: yaml 12 | 13 | %YAML 1.2 14 | --- 15 | Application: 16 | poll_interval: 10.0 17 | stats: 18 | log: True 19 | influxdb: 20 | host: localhost 21 | port: 8086 22 | database: rejected 23 | statsd: 24 | host: localhost 25 | port: 8125 26 | Connections: 27 | rabbit1: 28 | host: rabbit1 29 | port: 5672 30 | user: rejected 31 | pass: password 32 | ssl: False 33 | vhost: / 34 | heartbeat_interval: 300 35 | rabbit2: 36 | host: rabbit2 37 | port: 5672 38 | user: rejected 39 | pass: password 40 | ssl: False 41 | vhost: / 42 | heartbeat_interval: 300 43 | Consumers: 44 | example: 45 | consumer: example.Consumer 46 | connections: 47 | - rabbit1 48 | - name: rabbit2 49 | consume: False 50 | drop_exchange: dlxname 51 | qty: 2 52 | queue: generated_messages 53 | dynamic_qos: True 54 | ack: True 55 | max_errors: 100 56 | config: 57 | foo: True 58 | bar: baz 59 | 60 | Daemon: 61 | user: rejected 62 | group: daemon 63 | pidfile: /var/run/rejected.pid 64 | 65 | Logging: 66 | version: 1 67 | formatters: 68 | verbose: 69 | format: "%(levelname) -10s %(asctime)s %(process)-6d %(processName) -25s %(name) -30s %(funcName) -30s: %(message)s" 70 | datefmt: "%Y-%m-%d %H:%M:%S" 71 | syslog: 72 | format: "%(levelname)s %(name)s.%(funcName)s(): %(message)s" 73 | filters: [] 74 | handlers: 75 | console: 76 | class: logging.StreamHandler 77 | formatter: verbose 78 | debug_only: false 79 | syslog: 80 | class: logging.handlers.SysLogHandler 81 | facility: daemon 82 | address: /var/run/syslog 83 | #address: /dev/log 84 | formatter: 
syslog 85 | loggers: 86 | example: 87 | level: INFO 88 | propagate: true 89 | handlers: [console, syslog] 90 | helper: 91 | level: INFO 92 | propagate: true 93 | handlers: [console, syslog] 94 | rejected: 95 | level: INFO 96 | propagate: true 97 | handlers: [console, syslog] 98 | sprockets_influxdb: 99 | level: WARNING 100 | propagate: false 101 | handlers: [console, syslog] 102 | root: 103 | level: INFO 104 | propagate: true 105 | handlers: [console, syslog] 106 | disable_existing_loggers: true 107 | incremental: false 108 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | 3.23.0 5 | ------ 6 | - Update on channel and connection callback to accept any `*args, **kwargs` to accommodate Pika updates. Since arguments are not used, except logging only. 7 | - Update unit tests that may be broken due to asyncio / tornado updates. 8 | - Shutdown message connection it we detect it's not running to allow the MCP to attempt reconnect. 9 | - Update `rejected.testing` to not capture unhandled exceptions 10 | 11 | 3.22.3 12 | ------ 13 | - Loosen pyyaml for v6+ and Python 3.12 14 | 15 | 3.22.2 16 | ------ 17 | - Update on closed callback to receive an exception for Pika 1.3 18 | 19 | 3.22.1 20 | ------ 21 | - Handle AMQPHeartbeatTimeout gracefully - `#45 `_ - `joshehlinger `_ 22 | 23 | 3.22.0 24 | ------ 25 | - Bump pika version from 0.13.1 to 1.2.0 26 | - Update kwargs to basic_qos call since parameter names and order changed. 27 | - Update kwargs to basic_consume call since parameter names and order changed. 28 | - Update on_channel_closed callback to expect the closing_reason since the reply_code and reply_text were moved to attributes of the ChannelClosed exception. 29 | - Set the callback as a kwarg for channel.confirm_delivery since new parameter was introduced. 
30 | - Renamed `self.handle` to `self.connection` in Connection class for reference to pika.tornado_connection.TornadoConnection. 31 | - Add handling for new :exc:`pika.exceptions.ConnectionWrongStateError` when closing channel or connection. 32 | - Add support for `ssl_options` config parameters and deprecate `ssl` since it is no longer supported. 33 | 34 | 3.21.1 35 | ------ 36 | - FIXED an issue with an uncaught exception raised when connecting or reconnecting and a socket can not be connected. 37 | 38 | 3.21.0 39 | ------ 40 | - FIXED multiple Python 3.9 issues 41 | - Require helper >= 2.5.0, <3 42 | - Fix helper.controller.Controller import path in rejected.controller 43 | - Change from using a namedtuple to using a slotted Class due to issues pickling rejected.process.Callbacks 44 | - Just use profile instead of switching between cProfile and profile 45 | - Have helper fix log configuration after multiprocess popens the child process 46 | 47 | 3.20.10 48 | ------- 49 | - FIXED a bug in rejected.testing PublishedMessage body and properties being swapped for republished messages `#41 `_ - `nvllsvm `_ 50 | 51 | 3.20.9 52 | ------ 53 | - FIXED a bug in rejected.testing that was introduced in 3.20.8 `#37 `_ - `nvllsvm `_ 54 | - FIXED a compatibility issue with Python 3.9 `#38 `_ - `nvllsvm `_ 55 | 56 | 3.20.8 57 | ------ 58 | - FIXED a bug in rejected.testing when using only positional arguments `#36 `_ - `nvllsvm `_ 59 | - FIXED a defect in string formatting when raising an exception due to a missing setting 60 | 61 | 3.20.7 62 | ------ 63 | - Loosen the pin on pyyaml due to security issue 64 | 65 | 3.20.6 66 | ------ 67 | 68 | - FIXED an issue when TCP statsd fails to connect not calling the tcp on close callback 69 | 70 | 3.20.5 71 | ------ 72 | 73 | - When TCP statsd is enabled and the statsd client can not connect, shutdown `rejected.process.Process` 74 | - Only log TCP statsd send_metric failures when the client believes it is connected 75 | - If
send_metric failure occurs and it believes it's connected, shutdown `rejected.process.Process` 76 | - Log state on AMQP connection failure 77 | 78 | 3.20.4 79 | ------ 80 | 81 | - FIXED :meth:`~rejected.consumer.Consumer.initialize` getting called twice in `rejected.testing` 82 | 83 | 3.20.3 84 | ------ 85 | 86 | - Catch an operational exceptions when checking if a process is still alive 87 | 88 | 3.20.2 89 | ------ 90 | 91 | - Catch a few operational exceptions when starting a consumer process 92 | 93 | 3.20.1 94 | ------ 95 | 96 | - Don't expect asyncio's Futures to have `exc_info()`. 97 | 98 | 3.20.0 99 | ------ 100 | 101 | - flake8 cleanup 102 | - Update pins and minor fixes allowing support for Tornado 6, pika 0.13 103 | 104 | 3.19.21 105 | ------- 106 | 107 | - Better handle RabbitMQ connection failures, forced connection close, broker shutdowns, etc 108 | 109 | 3.19.20 110 | ------- 111 | 112 | - Address odd :meth:`~rejected.process.Connection.on_channel_closed` behavior with 113 | spinning connection creation when :exc:`pika.exceptions.ConnectionClosed` 114 | while trying to create a new channel. 115 | 116 | 3.19.19 117 | ------- 118 | 119 | - FIXED :exc:`rejected.consumer.RejectedException` to not blow up when an exception 120 | was created with no args or kwargs. 121 | 122 | 3.19.18 123 | ------- 124 | 125 | - FIXED :exc:`rejected.consumer.RejectedException` log message formatting when 126 | using format strings in the passed in value. 127 | 128 | 3.19.17 129 | ------- 130 | 131 | - Ensure exceptions are cast to strings when logging in :class:`rejected.consumer.Consumer` 132 | 133 | 3.19.16 134 | ------- 135 | 136 | - FIXED :meth:`~rejected.process.Connection.on_open` when pika raises 137 | :exc:`pika.exceptions.ConnectionClosed` when trying to create a new channel. 
138 | 139 | 3.19.15 140 | ------- 141 | 142 | - FIXED :meth:`~rejected.process.Connection.on_channel_closed` when pika raises 143 | :exc:`pika.exceptions.ConnectionClosed` when trying to create a new channel. 144 | 145 | 3.19.14 146 | ------- 147 | 148 | - Fix misnamed ACK_PROCESSING_EXCEPTIONS constant in processing logic 149 | - Hard pin to pika 0.12.0 due to breaking changes in 0.13 150 | 151 | 3.19.13 152 | ------- 153 | 154 | - FIXED :exc:`rejected.consumer.RejectedException` to pull the metric and value 155 | args from the kwargs instead of explicitly defining them. This allows for 156 | consumers prior to 3.19 to experience the same metric style behavior as 157 | before. 158 | - ADDED ``ACK_PROCESSING_EXCEPTIONS`` class level attribute to :class:`rejected.consumer.Consumer` 159 | that allows a consumer to ack a :exc:`~rejected.consumer.ProcessingException` 160 | instead of rejecting it, constraining the use of dead-lettering in RabbitMQ 161 | to :exc:`~rejected.consumer.MessageException`s.
162 | 163 | 3.19.12 164 | ------- 165 | 166 | - Loosen the pika pin to work with Python 3.7 167 | 168 | 3.19.11 169 | ------- 170 | 171 | - FIXED consumer cancellation handling to shutdown the connection 172 | 173 | 3.19.10 174 | ------- 175 | 176 | - Address shutdown and dead process bugs 177 | 178 | 3.19.9 179 | ------ 180 | 181 | - TCP statsd wants a linefeed 182 | 183 | 3.19.8 184 | ------ 185 | 186 | - FIXED statsd TCP configuration setting bug (str vs bool) 187 | - Log setup exceptions 188 | 189 | 190 | 3.19.7 191 | ------ 192 | 193 | - ADDED ability to connect to statsd via TCP for submitting metrics 194 | 195 | 3.19.6 196 | ------ 197 | 198 | - ADDED ability to disable including the hostname when submitting stats to statsd 199 | 200 | 3.19.5 201 | ------ 202 | 203 | - Add SSL connection flag support to configuration `#20 `_ - `code-fabriek `_ 204 | - Fix documentation for :py:class:`rejected.data.Measurement` 205 | - Alter logging levels for connection failures 206 | - Add :py:attr:`rejected.testing.AsyncTestCase.measurement` 207 | 208 | 3.19.4 209 | ------ 210 | 211 | - Try to handle a MCP process cleanup race condition better (Sentry REJECTED-DA) 212 | 213 | 3.19.3 214 | ------ 215 | 216 | - Really fix a bug with the processing time and message age metrics 217 | 218 | 3.19.2 219 | ------ 220 | 221 | - Fix a bug with the processing time and message age metrics 222 | - Catch a timeout when waiting on a zombie 223 | 224 | 3.19.1 225 | ------ 226 | 227 | - Fix a bug in the new durations code 228 | 229 | 3.19.0 230 | ------ 231 | 232 | - Sentry client changes: 233 | - Do not assign version, let the client figure that out 234 | - Do not specify the versions of loaded modules, let the client figure that out 235 | - Add `rejected.data.Measurement.add_duration`, changing the behavior of 236 | recorded durations, creating a stack of timings instead of a single timing 237 | for the key. 
For InfluxDB submissions, if there is only a single value, 238 | that metric will continue to submit as previous versions. If there are multiple, 239 | the average, min, max, median, and 95th percentile values will be submitted. 240 | - Add `rejected.consumer.Consumer.stats_add_duration` 241 | - Deprecate `rejected.consumer.Consumer.stats_add_timing` 243 | - Consumer tags are now in the format `[consumer-name]-[os PID]` 244 | - Created a base exception class `rejected.consumer.RejectedException` 245 | - `rejected.consumer.ConsumerException`, `rejected.consumer.MessageException`, 246 | and `rejected.consumer.ProcessingException` extend `rejected.consumer.RejectedException` 247 | - If a `rejected.consumer.ConsumerException`, `rejected.consumer.MessageException`, 248 | or `rejected.consumer.ProcessingException` are passed a keyword of `metric`, 249 | the consumer will automatically instrument a counter (statsd) or tag (InfluxDB) 250 | using the `metric` value. 251 | - `rejected.consumer.ConsumerException`, `rejected.consumer.MessageException`, 252 | and `rejected.consumer.ProcessingException` now support "new style" string formatting, 253 | automatically applying the args and keyword args that are passed into the creation 254 | of the exception.
255 | - Logging levels for exceptions changed: 256 | - `rejected.consumer.ConsumerException` are logged with error 257 | - `rejected.consumer.MessageException` are logged with info 258 | - `rejected.consumer.ProcessingException` are logged with warning 259 | - Fix the handling of child startup failures in the MCP 260 | - Fix a bug where un-configured consumers caused an exception in the MCP 261 | - Handle the edge case when a connection specified in the consumer config does not exist 262 | - Refactor how the version of the consumer module or package is determined 263 | - Add `ProcessingException` as a top-level package export 264 | - Fix misc docstrings 265 | - Fix the use of `SIGABRT` being used from child processes to notify the MCP when 266 | processes exit, instead register for `SIGCHLD` in the MCP. 267 | 268 | 3.18.9 269 | ------ 270 | 271 | - Added :meth:`rejected.testing.AsyncTestCase.published_messages` and :class:`rejected.testing.PublishedMessage` 272 | - Updated testing documentation 273 | - Updated the setup.py extras install for testing to install all testing dependencies 274 | - Made `raven` optional in `rejected.testing` 275 | 276 | 3.18.8 277 | ------ 278 | 279 | - Fix the mocks in `rejected.testing` 280 | 281 | 3.18.7 282 | ------ 283 | 284 | - Fix child process errors in shutdown 285 | - Fix unfiltered connection list returned to a process, introduced in 3.18.4 286 | 287 | 3.18.6 288 | ------ 289 | 290 | - Move message age stat to Consumer, add method to override key 291 | 292 | 3.18.5 293 | ------ 294 | 295 | - Treat NotImplementedError as an unhandled exception 296 | 297 | 3.18.4 298 | ------ 299 | 300 | - Handle UNHANDLED_EXCEPTION in rejected.testing 301 | - Add the `rejected.consumer.Consumer.io_loop` property 302 | - Add the `testing` setup.py `extras_require` entry 303 | 304 | 3.18.3 305 | ------ 306 | 307 | - Fix ``rejected.consumer.Consumer.require_setting`` 308 | 309 | 3.18.2 310 | ------ 311 | 312 | - Fix the republishing of dropped 
messages 313 | 314 | 3.18.1 315 | ------ 316 | 317 | - Fix ``ProcessingException`` AMQP header property assignment 318 | 319 | 3.18.0 320 | ------ 321 | 322 | - Add connection as an attribute of channel in ``rejected.testing`` 323 | - Refactor how error text is extracted in ``rejected.consumer.Consumer.execute`` 324 | - When a message raises a ProcessingException, the string value of the exception is added to the AMQP message headers property 325 | - Messages dropped by a consumer can now be republished to a different exchange 326 | 327 | 3.17.4 328 | ------ 329 | 330 | - Don't start consuming until all connections are ready, fix shutdown 331 | 332 | 3.17.3 333 | ------ 334 | 335 | - Fix publisher confirmations 336 | 337 | 3.17.2 338 | ------ 339 | 340 | - Don't blow up if `stats` is not defined in config 341 | 342 | 3.17.1 343 | ------ 344 | 345 | - Documentation updates 346 | - Fix the test for Consumer configuration 347 | 348 | 3.17.0 349 | ------ 350 | 351 | - `rejected.testing` updates 352 | - Add automatic assignment of `correlation-id` to `rejected.consumer.Consumer` 353 | - Only use `sentry_client` if it’s configured 354 | - Behavior change: Don't spawn a process per connection, Spawn `qty` consumers with N connections 355 | - Add State.is_active 356 | - Add attributes for the connection the message was received on and if the message was published by the consumer and returned by RabbitMQ 357 | - Deprecate `PublishingConsumer` and `SmartPublishingConsumer`, folding them into `Consumer` and `SmartConsumer` respectively 358 | - Refactor to not have a singular channel instance, but rather a dict of channels for all connections 359 | - Add the ability to specify a channel to publish a message on, defaulting to the channel the message was delivered on 360 | - Add a property that indicates the current message that is being processed was returned by RabbitMQ 361 | - Change `Consumer._execute` and `Consumer._set_channel` to be “public” but will hide from docs. 
362 | - Major Process refactor 363 | - Create a new Connection class to isolate direct AMQP connection/channel management from the Process class. 364 | - Alter Process to allow for multiple connections. This allows a consumer to consume from multiple AMQP broker connections or have AMQP broker connections that are not used for consuming. This could be useful for consuming from one broker and publishing to another broker in a different data center. 365 | - Add new ``enabled`` flag in the config for statsd and influxdb stats monitoring 366 | - Add a new behavior that puts pending messages sent into a ``collections.deque`` when a consumer is processing instead of just blocking on message delivery until processing is done. This could have a negative impact on memory utilization for consumers with large messages, but can be controlled by the ``qos_prefetch`` setting. 367 | - Process now sends messages returned from RabbitMQ to the Consumer 368 | - Process now will notify a consumer when RabbitMQ blocks and unblocks a connection 369 | 370 | 3.16.7 371 | ------ 372 | 373 | - Allow for any AMQP properties when testing 374 | 375 | 3.16.6 376 | ------ 377 | 378 | - Refactor and cleanup Sentry configuration and behavior 379 | 380 | 3.16.5 381 | ------ 382 | 383 | - Fix InfluxDB error metrics 384 | 385 | 3.16.4 386 | ------ 387 | 388 | - Update logging levels in `rejected.consumer.Consumer._execute` 389 | - Set exception error strings in per-request measurements 390 | 391 | 3.16.3 392 | ------ 393 | 394 | - Better exception logging/sentry use in async consumers 395 | 396 | 3.16.2 397 | ------ 398 | 399 | - Fix a bug using -o in Python 3 400 | 401 | 3.16.1 402 | ------ 403 | 404 | - Add `rejected.consumer.Consumer.send_exception_to_sentry` 405 | 406 | 3.16.0 407 | ------ 408 | 409 | - Add `rejected.testing` testing framework 410 | 411 | 3.15.1 412 | ------ 413 | 414 | - Ensure that message age is always a float 415 | 416 | 3.15.0 417 | ------ 418 | 419 | - Sentry Updates 420 | 
- Catch all top-level startup exceptions and send them to sentry 421 | - Fix the sending of consumer exceptions to sentry 422 | 423 | 3.14.0 424 | ------ 425 | 426 | - Cleanup the shutdown and provide way to bypass cache in active_processes 427 | - If a consumer has not responded back with stats info after 3 attempts, it will be shutdown and a new consumer will take its place. 428 | - Add the consumer name to the extra values for logging 429 | 430 | 3.13.4 431 | ------ 432 | 433 | - Properly handle finishing in `rejected.consumer.Consumer.prepare` 434 | - Fix default/class level config of error exchange, etc 435 | 436 | 3.13.3 437 | ------ 438 | 439 | - Fix `rejected.consumer.Consumer.stats_track_duration` 440 | 441 | 3.13.2 442 | ------ 443 | 444 | - Better backwards compatibility with `rejected.consumer.Consumer` "stats" commands 445 | 446 | 3.13.1 447 | ------ 448 | 449 | - Bugfixes: 450 | - Construct the proper InfluxDB base URL 451 | - Fix the mixin __init__ signature to support the new kwargs 452 | - Remove overly verbose logging 453 | 454 | 3.13.0 455 | ------ 456 | 457 | - Remove Python 2.6 support 458 | - Documentation Updates 459 | - consumer.Consumer: Accept multiple MESSAGE_TYPEs. 460 | - PublishingConsumer: Remove routing key from metric. 461 | - Add per-consumer sentry configuration 462 | - Refactor Consumer stats and statsd support 463 | - Update to use the per-message measurement 464 | - Changes how we submit measurements to statsd 465 | - Drops some redundant measurements that were submitted 466 | - Renames the exception measurement names 467 | - Adds support for InfluxDB 468 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. rejected documentation master file, created by 2 | sphinx-quickstart on Wed Dec 17 10:31:58 2014. 
Rejected is an AMQP consumer daemon and message processing framework. It allows 9 | for rapid development of message processing consumers by handling all of the 10 | core functionality of communicating with RabbitMQ and management of consumer 11 | processes. 12 | 13 | Rejected runs as a master process with multiple consumer configurations that are 14 | each run in an isolated process.
toctree:: 48 | :glob: 49 | :maxdepth: 2 50 | 51 | consumer_howto 52 | configuration 53 | example_config 54 | cli 55 | 56 | API Documentation 57 | ----------------- 58 | 59 | .. toctree:: 60 | :glob: 61 | :maxdepth: 2 62 | 63 | consumer 64 | api_testing 65 | 66 | Issues 67 | ------ 68 | Please report any issues to the Github repo at `https://github.com/gmr/rejected/issues `_ 69 | 70 | Source 71 | ------ 72 | rejected source is available on Github at `https://github.com/gmr/rejected `_ 73 | 74 | Version History 75 | --------------- 76 | See :doc:`history` 77 | 78 | Indices and tables 79 | ------------------ 80 | 81 | * :ref:`genindex` 82 | * :ref:`modindex` 83 | * :ref:`search` 84 | 85 | 86 | .. |Version| image:: https://img.shields.io/pypi/v/rejected.svg? 87 | :target: https://pypi.python.org/pypi/rejected 88 | 89 | .. |Status| image:: https://img.shields.io/travis/gmr/rejected.svg? 90 | :target: https://travis-ci.org/gmr/rejected 91 | 92 | .. |License| image:: https://img.shields.io/pypi/l/rejected.svg? 93 | :target: https://rejected.readthedocs.org 94 | 95 | .. |Climate| image:: https://img.shields.io/codeclimate/github/gmr/rejected.svg? 
96 | :target: https://codeclimate.com/github/gmr/rejected 97 | -------------------------------------------------------------------------------- /example.yaml: -------------------------------------------------------------------------------- 1 | %YAML 1.2 2 | --- 3 | Application: 4 | poll_interval: 10.0 5 | # sentry_dsn: https://[YOUR-SENTRY-DSN] 6 | stats: 7 | log: True 8 | influxdb: 9 | enabled: False 10 | scheme: http 11 | host: influxdb 12 | port: 8086 13 | user: influxdb 14 | password: foo 15 | database: bar 16 | statsd: 17 | enabled: False 18 | host: localhost 19 | port: 8125 20 | prefix: application.rejected 21 | Connections: 22 | rabbitmq: 23 | host: localhost 24 | port: 5672 25 | user: guest 26 | pass: guest 27 | ssl: False 28 | vhost: / 29 | heartbeat_interval: 60 30 | rabbitmq2: 31 | host: localhost 32 | port: 5672 33 | user: rejected 34 | pass: rabbitmq 35 | ssl: False 36 | vhost: / 37 | heartbeat_interval: 60 38 | Consumers: 39 | async: 40 | consumer: examples.AsyncExampleConsumer 41 | connections: 42 | - rabbitmq 43 | - name: rabbitmq2 44 | consume: False 45 | confirm: False 46 | # sentry_dsn: sentry_dsn: https://[YOUR-SENTRY-DSN] 47 | qty: 1 48 | queue: test 49 | ack: True 50 | influxdb_measurement: async_consumer 51 | qos_prefetch: 100 52 | max_errors: 100 53 | 54 | sync: 55 | consumer: examples.ExampleConsumer 56 | connections: 57 | - rabbitmq 58 | - name: rabbitmq2 59 | consume: False 60 | - foo 61 | qty: 1 62 | queue: generated_messages 63 | ack: True 64 | max_errors: 100 65 | error_exchange: errors 66 | influxdb_measurement: sync_consumer 67 | qos_prefetch: 1 68 | config: 69 | foo: True 70 | bar: baz 71 | 72 | empty: 73 | connections: 74 | - rabbitmq 75 | 76 | Daemon: 77 | user: rejected 78 | group: daemon 79 | pidfile: /var/run/rejected.pid 80 | 81 | Logging: 82 | version: 1 83 | formatters: 84 | verbose: 85 | format: "%(levelname) -10s %(asctime)s %(process)-6d %(processName) -25s %(name) -20s %(funcName) -25s: %(message)s" 86 | datefmt: 
format: "%(levelname)s %(name)s.%(funcName)s: %(message)s {CID %(correlation_id)s}"
class ExampleConsumer(consumer.SmartConsumer):

    def process(self):
        """Log the message body, record a random ``action`` counter, and
        occasionally raise one of the rejected exception types so the
        framework's error-handling paths get exercised.

        """
        self.logger.info('Message: %r', self.body)
        roll = random.randint(0, 100)
        self.stats_incr('action', roll)
        if roll >= 10:
            return
        if roll == 0:
            raise consumer.ConsumerException('zomg')
        if roll < 5:
            raise consumer.MessageException('reject')
        raise consumer.ProcessingException('publish')


class AsyncExampleConsumer(consumer.Consumer):

    @gen.coroutine
    def process(self):
        """Fetch two URLs in parallel, timing the combined fetch duration
        under the ``async_fetch`` measurement key.

        """
        self.logger.info('Message: %r', self.body)
        client = httpclient.AsyncHTTPClient()
        urls = ['http://www.google.com', 'http://www.bing.com']
        with self.stats_track_duration('async_fetch'):
            responses = yield [client.fetch(url) for url in urls]
        self.logger.info('Length: %r', [len(r.body) for r in responses])
# description: rejected consumers
# processname: rejected

# Source function library.
. /etc/init.d/functions

# Installation dir
rejected_DIR="/opt/rejected"

# PID file removed on a clean stop. Previously referenced as
# ${rejected_PID_FILE} without ever being defined, so the stale-pidfile
# cleanup in stop() was a no-op.
rejected_PID_FILE="/var/run/rejected.pid"

prog="rejected"

start() {
    if [ ${EUID} -ne 0 ]; then
        echo -n $"you must be root"
        failure $"you must be root"
        echo
        return 1
    fi

    echo -n $"Starting ${prog}: "

    /opt/rejected/bin/start.sh
    # Propagate the start script's exit status; this previously returned a
    # hard-coded 1, so the init system always saw startup as a failure.
    RETVAL=$?
    echo
    return $RETVAL
}

stop() {
    if [ ${EUID} -ne 0 ]; then
        echo -n $"you must be root"
        failure $"you must be root"
        echo
        return 1
    fi

    echo -n $"Stopping ${prog}: "

    /opt/rejected/bin/stop.sh
    RETVAL=$?
    # Only remove the pidfile on a successful stop
    [ $RETVAL -eq 0 ] && rm -f ${rejected_PID_FILE}
    echo
    return $RETVAL
}

restart() {
    stop
    start
}

case "$1" in
    start)
        start
        RETVAL=$?
        ;;
    stop)
        stop
        RETVAL=$?
        ;;
    restart)
        restart
        RETVAL=$?
        ;;
    *)
        echo $"Usage: $0 {start|stop|restart}"
        RETVAL=2
        ;;
esac

exit $RETVAL
Roy ' 20 | __since__ = '2009-09-10' 21 | __version__ = '3.23.1' 22 | 23 | __all__ = [ 24 | '__author__', 25 | '__since__', 26 | '__version__', 27 | 'Consumer', 28 | 'ConsumerException', 29 | 'MessageException', 30 | 'ProcessingException', 31 | 'PublishingConsumer', 32 | 'SmartConsumer', 33 | 'SmartPublishingConsumer' 34 | ] 35 | -------------------------------------------------------------------------------- /rejected/controller.py: -------------------------------------------------------------------------------- 1 | """ 2 | OS Level controlling class invokes startup, shutdown and handles signals. 3 | 4 | """ 5 | import logging 6 | import os 7 | import signal 8 | import sys 9 | 10 | import helper 11 | from helper import controller, parser 12 | try: 13 | import raven 14 | except ImportError: 15 | raven = None 16 | 17 | from . import __version__, mcp 18 | 19 | LOGGER = logging.getLogger(__name__) 20 | 21 | 22 | class Controller(controller.Controller): 23 | """Rejected Controller application that invokes the MCP and handles all 24 | of the OS level concerns. 25 | 26 | """ 27 | def __init__(self, *args, **kwargs): 28 | super(Controller, self).__init__(*args, **kwargs) 29 | self._mcp = None 30 | self._sentry_client = None 31 | if raven and self.config.application.get('sentry_dsn'): 32 | kwargs = { 33 | 'exclude_paths': ['tornado'], 34 | 'include_paths': ['pika', 'helper', 'rejected'], 35 | 'processors': ['raven.processors.SanitizePasswordsProcessor'] 36 | } 37 | if os.environ.get('ENVIRONMENT'): 38 | kwargs['environment'] = os.environ['ENVIRONMENT'] 39 | self._sentry_client = raven.Client( 40 | self.config.application['sentry_dsn'], **kwargs) 41 | 42 | def _master_control_program(self): 43 | """Return an instance of the MasterControlProgram. 
    def stop(self):
        """Shutdown the MCP and child processes cleanly.

        Requests that the MCP stop its child processes, waits up to three
        seconds for it to wind down, then force-stops it if it is still
        running after the wait.

        """
        LOGGER.info('Shutting down controller')
        self.set_state(self.STATE_STOP_REQUESTED)

        # Clear out the timer
        # NOTE(review): zeroes the profiling interval timer -- presumably
        # armed elsewhere (helper.controller base or profiling mode); confirm
        signal.setitimer(signal.ITIMER_PROF, 0, 0)

        self._mcp.stop_processes()

        if self._mcp.is_running:
            LOGGER.info('Waiting up to 3 seconds for MCP to shut things down')
            # Arm a 3-second real-time timer and block until a signal
            # arrives; the timer's SIGALRM bounds how long pause() waits
            signal.setitimer(signal.ITIMER_REAL, 3, 0)
            signal.pause()
            LOGGER.info('Post pause')

        # Force MCP to stop
        if self._mcp.is_running:
            LOGGER.warning('MCP is taking too long, requesting process kills')
            self._mcp.stop_processes()
            del self._mcp
        else:
            LOGGER.info('MCP exited cleanly')

        # Change our state
        self._stopped()
        LOGGER.info('Shutdown complete')
def add_parser_arguments():
    """Register rejected's command-line options on the shared helper
    argument parser.

    """
    argparser = parser.get()
    options = [
        (('-P', '--profile'),
         dict(action='store', default=None, dest='profile',
              help='Profile the consumer modules, specifying '
                   'the output directory.')),
        (('-o', '--only'),
         dict(action='store', default=None, dest='consumer',
              help='Only run the consumer specified')),
        (('-p', '--prepend-path'),
         dict(action='store', default=None, dest='prepend_path',
              help='Prepend the python path with the value.')),
        (('-q', '--qty'),
         dict(action='store', type=int, default=None, dest='quantity',
              help='Run the specified quantity of consumer '
                   'processes when used in conjunction with -o')),
    ]
    for flags, params in options:
        argparser.add_argument(*flags, **params)
    argparser.add_argument('--version', action='version',
                           version='%(prog)s {}'.format(__version__))
the attributes and values as key, value pairs. 26 | 27 | :rtype: tuple 28 | 29 | """ 30 | for attribute in self.__slots__: 31 | yield (attribute, getattr(self, attribute)) 32 | 33 | def __repr__(self): 34 | """Return a string representation of the object and all of its 35 | attributes. 36 | 37 | :rtype: str 38 | 39 | """ 40 | items = ['%s=%s' % (k, getattr(self, k)) 41 | for k in self.__slots__ if getattr(self, k)] 42 | return '<%s(%s)>' % (self.__class__.__name__, items) 43 | 44 | 45 | class Message(Data): 46 | """Class for containing all the attributes about a message object creating 47 | a flatter, move convenient way to access the data while supporting the 48 | legacy methods that were previously in place in rejected < 2.0 49 | 50 | +------------------------------------------------------------------+ 51 | | Attributes | 52 | +======================+===========================================+ 53 | | :attr:`body` |The AMQP message body | 54 | +----------------------+-------------------------------------------+ 55 | | :attr:`connection` | The name of the connection that the | 56 | | | message was received on. | 57 | +----------------------+-------------------------------------------+ 58 | | :attr:`channel` | The channel that the message was | 59 | | | was received on. | 60 | +----------------------+-------------------------------------------+ 61 | | :attr:`consumer_tag` | The consumer tag registered with RabbitMQ | 62 | | | that identifies which consumer registered | 63 | | | to receive this message. | 64 | +----------------------+-------------------------------------------+ 65 | | :attr:`delivery_tag` | The delivery tag that represents the | 66 | | | deliver of this message with RabbitMQ. 
| 67 | +----------------------+-------------------------------------------+ 68 | | :attr:`exchange` | The exchange the message was published to | 69 | +----------------------+-------------------------------------------+ 70 | | :attr:`method` | The :class:`pika.spec.Basic.Deliver` or | 71 | | | :class:`pika.spec.Basic.Return` object | 72 | | | that represents how the message was | 73 | | | received by rejected. | 74 | +----------------------+-------------------------------------------+ 75 | | :attr:`properties` | The :class:`~pika.spec.BasicProperties` | 76 | | | object that represents the message's AMQP | 77 | | | properties. | 78 | +----------------------+-------------------------------------------+ 79 | | :attr:`redelivered` | A flag that indicates the message was | 80 | | | previously delivered by RabbitMQ. | 81 | +----------------------+-------------------------------------------+ 82 | | :attr:`returned` | A flag that indicates the message was | 83 | | | returned by RabbitMQ. | 84 | +----------------------+-------------------------------------------+ 85 | | :attr:`routing_key` | The routing key that was used to deliver | 86 | | | the message. | 87 | +----------------------+-------------------------------------------+ 88 | 89 | """ 90 | __slots__ = ['connection', 'channel', 'method', 'properties', 'body', 91 | 'consumer_tag', 'delivery_tag', 'exchange', 'redelivered', 92 | 'routing_key', 'returned'] 93 | 94 | def __init__(self, connection, channel, method, properties, body, 95 | returned=False): 96 | """Initialize a message setting the attributes from the given channel, 97 | method, header and body. 
98 | 99 | :param str connection: The connection name for the message 100 | :param channel: The channel the message was received on 101 | :type channel: pika.channel.Channel 102 | :param pika.frames.Method method: pika Method Frame object 103 | :param pika.spec.BasicProperties properties: message properties 104 | :param str body: Opaque message body 105 | :param bool returned: Indicates the message was returned 106 | 107 | """ 108 | self.connection = connection 109 | self.channel = channel 110 | self.method = method 111 | self.properties = Properties(properties) 112 | self.body = copy.copy(body) 113 | self.returned = returned 114 | 115 | # Map method properties 116 | self.consumer_tag = method.consumer_tag 117 | self.delivery_tag = method.delivery_tag 118 | self.exchange = method.exchange 119 | self.redelivered = method.redelivered 120 | self.routing_key = method.routing_key 121 | 122 | 123 | class Properties(Data): 124 | """A class that represents all of the field attributes of AMQP's 125 | ``Basic.Properties``. 
126 | 127 | +-----------------------------------------------------------------+ 128 | | Attributes | 129 | +==========================+======================================+ 130 | | :attr:`app_id` | Creating application id | 131 | +--------------------------+--------------------------------------+ 132 | | :attr:`content_type` | MIME content type | 133 | +--------------------------+--------------------------------------+ 134 | | :attr:`content_encoding` | MIME content encoding | 135 | +--------------------------+--------------------------------------+ 136 | | :attr:`correlation_id` | Application correlation identifier | 137 | +--------------------------+--------------------------------------+ 138 | | :attr:`delivery_mode` | Non-persistent (1) or persistent (2) | 139 | +--------------------------+--------------------------------------+ 140 | | :attr:`expiration` | Message expiration specification | 141 | +--------------------------+--------------------------------------+ 142 | | :attr:`headers` | Message header field table | 143 | +--------------------------+--------------------------------------+ 144 | | :attr:`message_id` | Application message identifier | 145 | +--------------------------+--------------------------------------+ 146 | | :attr:`priority` | Message priority, 0 to 9 | 147 | +--------------------------+--------------------------------------+ 148 | | :attr:`reply_to` | Address to reply to | 149 | +--------------------------+--------------------------------------+ 150 | | :attr:`timestamp` | Message timestamp | 151 | +--------------------------+--------------------------------------+ 152 | | :attr:`type` | Message type name | 153 | +--------------------------+--------------------------------------+ 154 | | :attr:`user_id` | Creating user id | 155 | +--------------------------+--------------------------------------+ 156 | 157 | """ 158 | __slots__ = ['app_id', 'content_type', 'content_encoding', 159 | 'correlation_id', 'delivery_mode', 'expiration', 
class Measurement(object):
    """Common Measurement Object that provides common methods for collecting
    and exposing measurement data that is submitted in
    :class:`rejected.process.Process` and :class:`rejected.consumer.Consumer`
    for submission to statsd or influxdb.

    Attributes:

    - ``counters``: Counters affected by :meth:`decr` and :meth:`incr`
    - ``durations``: Mapping of key to a list of duration values (float/int)
    - ``tags``: Tag key/value pairs for use with InfluxDB
    - ``values``: Numeric values such as integers, gauges, and such

    .. versionadded:: 3.13.0

    """
    def __init__(self):
        self.durations = {}
        self.counters = collections.Counter()
        self.tags = {}
        self.values = {}

    def decr(self, key, value=1):
        """Decrement a counter.

        :param str key: The key to decrement
        :param int value: The value to decrement by

        """
        self.counters[key] -= value

    def incr(self, key, value=1):
        """Increment a counter.

        :param str key: The key to increment
        :param int value: The value to increment by

        """
        self.counters[key] += value

    def add_duration(self, key, value):
        """Add a duration for the specified key

        :param str key: The value name
        :param float value: The value

        .. versionadded:: 3.19.0

        """
        self.durations.setdefault(key, []).append(value)

    def set_tag(self, key, value):
        """Set a tag. This is only used for InfluxDB measurements.

        :param str key: The tag name
        :param value: The tag value
        :type value: str or bool or int

        """
        self.tags[key] = value

    def set_value(self, key, value):
        """Set a value.

        :param str key: The value name
        :type value: int or float
        :param value: The value

        """
        self.values[key] = value

    @contextlib.contextmanager
    def track_duration(self, key):
        """Context manager that sets a value with the duration of time that it
        takes to execute whatever it is wrapping.

        :param str key: The timing name

        """
        durations = self.durations.setdefault(key, [])
        start_time = time.time()
        try:
            yield
        finally:
            # max() clamps against clock adjustments so a negative
            # duration is never recorded
            durations.append(max(start_time, time.time()) - start_time)


"""
Logging Related Things

"""
import logging


class CorrelationFilter(logging.Filter):
    """Filter records based upon the presence of a ``correlation_id``
    attribute.

    Bug fix: previously subclassed :class:`logging.Filter`'s unrelated
    sibling :class:`logging.Formatter`; the class is attached as a filter
    and only its :meth:`filter` method is ever invoked, so the correct
    base class is :class:`logging.Filter`.

    :param bool exists: When truthy, pass only records that carry a
        ``correlation_id``; otherwise pass only records that do not.

    """
    def __init__(self, exists=None):
        super(CorrelationFilter, self).__init__()
        self.exists = exists

    def filter(self, record):
        """Filter returns based upon the combination of ``self.exists`` and
        the presence of the ``correlation_id`` record attribute.

        :param logging.LogRecord record: The logging record
        :rtype: bool

        """
        if self.exists:
            return hasattr(record, 'correlation_id')
        return not hasattr(record, 'correlation_id')


class CorrelationAdapter(logging.LoggerAdapter):
    """A LoggerAdapter that appends the consumer's correlation ID and name
    to the message record properties.

    """
    def __init__(self, logger, consumer, **extra):
        self.logger = logger
        self.consumer = consumer
        super(CorrelationAdapter, self).__init__(logger, extra)

    def process(self, msg, kwargs):
        """Process the logging message and keyword arguments passed in to
        a logging call to insert contextual information.

        :param str msg: The message to process
        :param dict kwargs: The kwargs to append
        :rtype: (str, dict)

        """
        kwargs['extra'] = {'correlation_id': self.consumer.correlation_id,
                           'consumer': self.consumer.name}
        return msg, kwargs
40 | 41 | :param str msg: The message to process 42 | :param dict kwargs: The kwargs to append 43 | :rtype: (str, dict) 44 | 45 | """ 46 | kwargs['extra'] = {'correlation_id': self.consumer.correlation_id, 47 | 'consumer': self.consumer.name} 48 | return msg, kwargs 49 | -------------------------------------------------------------------------------- /rejected/mcp.py: -------------------------------------------------------------------------------- 1 | """ 2 | Master Control Program 3 | 4 | """ 5 | import collections 6 | import logging 7 | import multiprocessing 8 | import os 9 | try: 10 | import Queue as queue 11 | except ImportError: 12 | import queue 13 | import signal 14 | import sys 15 | import time 16 | 17 | import psutil 18 | 19 | from . import __version__, process, state 20 | 21 | LOGGER = logging.getLogger(__name__) 22 | 23 | _PROCESS_RUNNING = [psutil.STATUS_RUNNING, psutil.STATUS_SLEEPING] 24 | _PROCESS_STOPPED_OR_DEAD = [psutil.STATUS_STOPPED, psutil.STATUS_DEAD] 25 | 26 | if sys.version_info < (3, 0, 0): 27 | FileNotFoundError = psutil.NoSuchProcess 28 | ProcessLookupError = OSError 29 | 30 | 31 | class Consumer(object): 32 | """Class used for keeping track of each consumer type being managed by 33 | the MCP 34 | 35 | """ 36 | 37 | def __init__(self, last_proc_num, processes, qty, queue): 38 | self.last_proc_num = last_proc_num 39 | self.processes = processes 40 | self.qty = qty 41 | self.queue = queue 42 | 43 | 44 | class MasterControlProgram(state.State): 45 | """Master Control Program keeps track of and manages consumer processes.""" 46 | 47 | DEFAULT_CONSUMER_QTY = 1 48 | MAX_SHUTDOWN_WAIT = 10 49 | MAX_UNRESPONSIVE_COUNT = 3 50 | POLL_INTERVAL = 60.0 51 | POLL_RESULTS_INTERVAL = 3.0 52 | SHUTDOWN_WAIT = 1 53 | 54 | def __init__(self, config, consumer=None, profile=None, quantity=None): 55 | """Initialize the Master Control Program 56 | 57 | :param config: The full content from the YAML config file 58 | :type config: helper.config.Config 59 | :param 
str consumer: If specified, only run processes for this consumer 60 | :param str profile: Optional profile output directory to 61 | enable profiling 62 | 63 | """ 64 | self.set_process_name() 65 | LOGGER.info('rejected v%s initializing', __version__) 66 | super(MasterControlProgram, self).__init__() 67 | 68 | # Default values 69 | self._active_cache = None 70 | self.consumer_cfg = self.get_consumer_cfg(config, consumer, quantity) 71 | self.consumers = {} 72 | self.config = config 73 | self.last_poll_results = {} 74 | self.poll_data = {'time': 0, 'processes': []} 75 | self.poll_timer = None 76 | self.profile = profile 77 | self.results_timer = None 78 | self.stats = {} 79 | self.stats_queue = multiprocessing.Queue() 80 | self.polled = False 81 | self.unresponsive = collections.Counter() 82 | 83 | # Flag to indicate child creation error 84 | self.child_abort = False 85 | 86 | # Carry for logging internal stats collection data 87 | self.log_stats_enabled = config.application.get('stats', {}).get( 88 | 'log', config.application.get('log_stats', False)) 89 | LOGGER.debug('Stats logging enabled: %s', self.log_stats_enabled) 90 | 91 | # Setup the poller related threads 92 | self.poll_interval = config.application.get('poll_interval', 93 | self.POLL_INTERVAL) 94 | LOGGER.debug('Set process poll interval to %.2f', self.poll_interval) 95 | 96 | def active_processes(self, use_cache=True): 97 | """Return a list of all active processes, pruning dead ones 98 | 99 | :rtype: list 100 | 101 | """ 102 | LOGGER.debug('Checking active processes (cache: %s)', use_cache) 103 | if use_cache and self._active_cache and \ 104 | self._active_cache[0] > time.time() - self.poll_interval: 105 | return self._active_cache[1] 106 | active_processes, dead_processes = [], [] 107 | for consumer in self.consumers: 108 | for name in list(self.consumers[consumer].processes.keys()): 109 | child = self.get_consumer_process(consumer, name) 110 | if child is None: 111 | continue 112 | if child.pid is None: 
113 | dead_processes.append((consumer, name)) 114 | continue 115 | elif int(child.pid) == os.getpid(): 116 | continue 117 | try: 118 | proc = psutil.Process(child.pid) 119 | except (FileNotFoundError, psutil.NoSuchProcess): 120 | dead_processes.append((consumer, name)) 121 | continue 122 | 123 | if self.unresponsive[name] >= self.MAX_UNRESPONSIVE_COUNT: 124 | LOGGER.info( 125 | 'Killing unresponsive consumer %s (%i): ' 126 | '%i misses', name, proc.pid, self.unresponsive[name]) 127 | try: 128 | os.kill(child.pid, signal.SIGABRT) 129 | except OSError: 130 | pass 131 | dead_processes.append((consumer, name)) 132 | elif self.is_dead(proc, name): 133 | dead_processes.append((consumer, name)) 134 | else: 135 | active_processes.append(child) 136 | 137 | if dead_processes: 138 | LOGGER.debug('Removing %i dead process(es)', len(dead_processes)) 139 | for proc in dead_processes: 140 | self.remove_consumer_process(*proc) 141 | self._active_cache = time.time(), active_processes 142 | return active_processes 143 | 144 | def calculate_stats(self, data): 145 | """Calculate the stats data for our process level data. 
146 | 147 | :param data: The collected stats data to report on 148 | :type data: dict 149 | 150 | """ 151 | timestamp = data['timestamp'] 152 | del data['timestamp'] 153 | 154 | # Iterate through the last poll results 155 | stats = self.consumer_stats_counter() 156 | consumer_stats = {} 157 | for name in data.keys(): 158 | consumer_stats[name] = self.consumer_stats_counter() 159 | consumer_stats[name]['processes'] = self.process_count(name) 160 | for proc in data[name].keys(): 161 | for key in stats: 162 | value = data[name][proc]['counts'].get(key, 0) 163 | stats[key] += value 164 | consumer_stats[name][key] += value 165 | 166 | # Return a data structure that can be used in reporting out the stats 167 | stats['processes'] = len(self.active_processes()) 168 | return { 169 | 'last_poll': timestamp, 170 | 'consumers': consumer_stats, 171 | 'process_data': data, 172 | 'counts': stats 173 | } 174 | 175 | def check_process_counts(self): 176 | """Check for the minimum consumer process levels and start up new 177 | processes needed. 178 | 179 | """ 180 | LOGGER.debug('Checking minimum consumer process levels') 181 | for name in self.consumers: 182 | processes_needed = self.process_spawn_qty(name) 183 | if processes_needed: 184 | LOGGER.info('Need to spawn %i processes for %s', 185 | processes_needed, name) 186 | self.start_processes(name, processes_needed) 187 | 188 | def collect_results(self, data_values): 189 | """Receive the data from the consumers polled and process it. 
190 | 191 | :param dict data_values: The poll data returned from the consumer 192 | :type data_values: dict 193 | 194 | """ 195 | self.last_poll_results['timestamp'] = self.poll_data['timestamp'] 196 | 197 | # Get the name and consumer name and remove it from what is reported 198 | consumer_name = data_values['consumer_name'] 199 | del data_values['consumer_name'] 200 | process_name = data_values['name'] 201 | del data_values['name'] 202 | 203 | # Add it to our last poll global data 204 | if consumer_name not in self.last_poll_results: 205 | self.last_poll_results[consumer_name] = {} 206 | self.last_poll_results[consumer_name][process_name] = data_values 207 | 208 | # Calculate the stats 209 | self.stats = self.calculate_stats(self.last_poll_results) 210 | 211 | @staticmethod 212 | def consumer_keyword(counts): 213 | """Return consumer or consumers depending on the process count. 214 | 215 | :param dict counts: The count dictionary to use process count 216 | :rtype: str 217 | 218 | """ 219 | return 'consumer' if counts['processes'] == 1 else 'consumers' 220 | 221 | @staticmethod 222 | def consumer_stats_counter(): 223 | """Return a new consumer stats counter instance. 224 | 225 | :rtype: dict 226 | 227 | """ 228 | return { 229 | process.Process.ERROR: 0, 230 | process.Process.PROCESSED: 0, 231 | process.Process.REDELIVERED: 0 232 | } 233 | 234 | def get_consumer_process(self, consumer, name): 235 | """Get the process object for the specified consumer and process name. 236 | 237 | :param str consumer: The consumer name 238 | :param str name: The process name 239 | :returns: multiprocessing.Process 240 | 241 | """ 242 | return self.consumers[consumer].processes.get(name) 243 | 244 | @staticmethod 245 | def get_consumer_cfg(config, only, qty): 246 | """Get the consumers config, possibly filtering the config if only 247 | or qty is set. 
248 | 249 | :param config: The consumers config section 250 | :type config: helper.config.Config 251 | :param str only: When set, filter to run only this consumer 252 | :param int qty: When set, set the consumer qty to this value 253 | :rtype: dict 254 | 255 | """ 256 | consumers = dict(config.application.Consumers) 257 | if only: 258 | for key in list(consumers.keys()): 259 | if key != only: 260 | del consumers[key] 261 | if qty: 262 | consumers[only]['qty'] = qty 263 | return consumers 264 | 265 | def is_dead(self, proc, name): 266 | """Checks to see if the specified process is dead. 267 | 268 | :param psutil.Process proc: The process to check 269 | :param str name: The name of consumer 270 | :rtype: bool 271 | 272 | """ 273 | LOGGER.debug('Checking %s (%r)', name, proc) 274 | try: 275 | status = proc.status() 276 | except psutil.NoSuchProcess: 277 | LOGGER.debug('NoSuchProcess: %s (%r)', name, proc) 278 | return True 279 | 280 | LOGGER.debug('Process %s (%s) status: %r (Unresponsive Count: %s)', 281 | name, proc.pid, status, self.unresponsive[name]) 282 | if status in _PROCESS_RUNNING: 283 | return False 284 | elif status == psutil.STATUS_ZOMBIE: 285 | try: 286 | proc.wait(0.1) 287 | except psutil.TimeoutExpired: 288 | pass 289 | try: 290 | proc.terminate() 291 | status = proc.status() 292 | except psutil.NoSuchProcess: 293 | LOGGER.debug('NoSuchProcess: %s (%r)', name, proc) 294 | return True 295 | return status in _PROCESS_STOPPED_OR_DEAD 296 | 297 | def kill_processes(self): 298 | """Gets called on shutdown by the timer when too much time has gone by, 299 | calling the terminate method instead of nicely asking for the consumers 300 | to stop. 
301 | 302 | """ 303 | LOGGER.critical('Max shutdown exceeded, forcibly exiting') 304 | processes = self.active_processes(False) 305 | while processes: 306 | for proc in self.active_processes(False): 307 | if int(proc.pid) != int(os.getpid()): 308 | LOGGER.warning('Killing %s (%s)', proc.name, proc.pid) 309 | try: 310 | os.kill(int(proc.pid), signal.SIGKILL) 311 | except OSError: 312 | pass 313 | else: 314 | LOGGER.warning('Cowardly refusing kill self (%s, %s)', 315 | proc.pid, os.getpid()) 316 | time.sleep(0.5) 317 | processes = self.active_processes(False) 318 | 319 | LOGGER.info('Killed all children') 320 | return self.set_state(self.STATE_STOPPED) 321 | 322 | def log_stats(self): 323 | """Output the stats to the LOGGER.""" 324 | if not self.stats.get('counts'): 325 | LOGGER.info('Did not receive any stats data from children') 326 | return 327 | 328 | if self.poll_data['processes']: 329 | LOGGER.warning('%i process(es) did not respond with stats: %r', 330 | len(self.poll_data['processes']), 331 | self.poll_data['processes']) 332 | 333 | if self.stats['counts']['processes'] > 1: 334 | LOGGER.info('%i consumers processed %i messages with %i errors', 335 | self.stats['counts']['processes'], 336 | self.stats['counts']['processed'], 337 | self.stats['counts']['failed']) 338 | 339 | for key in self.stats['consumers'].keys(): 340 | LOGGER.info('%i %s %s processed %i messages with %i errors', 341 | self.stats['consumers'][key]['processes'], key, 342 | self.consumer_keyword(self.stats['consumers'][key]), 343 | self.stats['consumers'][key]['processed'], 344 | self.stats['consumers'][key]['failed']) 345 | 346 | def new_consumer(self, config, consumer_name): 347 | """Return a consumer dict for the given name and configuration. 
348 | 349 | :param dict config: The consumer configuration 350 | :param str consumer_name: The consumer name 351 | :rtype: dict 352 | 353 | """ 354 | return Consumer(0, {}, config.get('qty', self.DEFAULT_CONSUMER_QTY), 355 | config.get('queue', consumer_name)) 356 | 357 | def new_process(self, consumer_name): 358 | """Create a new consumer instances 359 | 360 | :param str consumer_name: The name of the consumer 361 | :return tuple: (str, process.Process) 362 | 363 | """ 364 | process_name = '%s-%s' % (consumer_name, 365 | self.new_process_number(consumer_name)) 366 | kwargs = { 367 | 'config': self.config.application, 368 | 'consumer_name': consumer_name, 369 | 'profile': self.profile, 370 | 'daemon': False, 371 | 'stats_queue': self.stats_queue, 372 | 'logging_config': self.config.logging 373 | } 374 | return process_name, process.Process(name=process_name, kwargs=kwargs) 375 | 376 | def new_process_number(self, name): 377 | """Increment the counter for the process id number for a given consumer 378 | configuration. 379 | 380 | :param str name: Consumer name 381 | :rtype: int 382 | 383 | """ 384 | self.consumers[name].last_proc_num += 1 385 | return self.consumers[name].last_proc_num 386 | 387 | def on_sigchld(self, _signum, _unused_frame): 388 | """Invoked when a child sends up an SIGCHLD signal. 389 | 390 | :param int _signum: The signal that was invoked 391 | :param frame _unused_frame: The frame that was interrupted 392 | 393 | """ 394 | LOGGER.info('SIGCHLD received from child') 395 | if not self.active_processes(False): 396 | LOGGER.info('Stopping with no active processes and child error') 397 | signal.setitimer(signal.ITIMER_REAL, 0, 0) 398 | self.set_state(self.STATE_STOPPED) 399 | 400 | def on_timer(self, _signum, _unused_frame): 401 | """Invoked by the Poll timer signal. 
402 | 403 | :param int _signum: The signal that was invoked 404 | :param frame _unused_frame: The frame that was interrupted 405 | 406 | """ 407 | if self.is_shutting_down: 408 | LOGGER.debug('Polling timer fired while shutting down') 409 | return 410 | if not self.polled: 411 | self.poll() 412 | self.polled = True 413 | self.set_timer(5) # Wait 5 seconds for results 414 | else: 415 | self.polled = False 416 | self.poll_results_check() 417 | self.set_timer(self.poll_interval) # Wait poll interval duration 418 | 419 | # If stats logging is enabled, log the stats 420 | if self.log_stats_enabled: 421 | self.log_stats() 422 | 423 | # Increment the unresponsive children 424 | for proc_name in self.poll_data['processes']: 425 | self.unresponsive[proc_name] += 1 426 | 427 | # Remove counters for processes that came back to life 428 | for proc_name in list(self.unresponsive.keys()): 429 | if proc_name not in self.poll_data['processes']: 430 | del self.unresponsive[proc_name] 431 | 432 | def poll(self): 433 | """Start the poll process by invoking the get_stats method of the 434 | consumers. If we hit this after another interval without fully 435 | processing, note it with a warning. 
436 | 437 | """ 438 | self.set_state(self.STATE_ACTIVE) 439 | 440 | # If we don't have any active consumers, spawn new ones 441 | if not self.total_process_count: 442 | LOGGER.debug('Did not find any active consumers in poll') 443 | return self.check_process_counts() 444 | 445 | # Start our data collection dict 446 | self.poll_data = {'timestamp': time.time(), 'processes': []} 447 | 448 | # Iterate through all of the consumers 449 | for proc in list(self.active_processes()): 450 | if proc == multiprocessing.current_process(): 451 | continue 452 | 453 | # Send the profile signal 454 | try: 455 | os.kill(int(proc.pid), signal.SIGPROF) 456 | except ProcessLookupError as error: 457 | LOGGER.warning('Error sending SIGPROF to %s: %s', 458 | proc.pid, error) 459 | else: 460 | self.poll_data['processes'].append(proc.name) 461 | 462 | # Check if we need to start more processes 463 | self.check_process_counts() 464 | 465 | @property 466 | def poll_duration_exceeded(self): 467 | """Return true if the poll time has been exceeded. 468 | :rtype: bool 469 | 470 | """ 471 | return ((time.time() - self.poll_data['timestamp']) >= 472 | self.poll_interval) 473 | 474 | def poll_results_check(self): 475 | """Check the polling results by checking to see if the stats queue is 476 | empty. If it is not, try and collect stats. If it is set a timer to 477 | call ourselves in _POLL_RESULTS_INTERVAL. 478 | 479 | """ 480 | LOGGER.debug('Checking for poll results') 481 | while True: 482 | try: 483 | stats = self.stats_queue.get(False) 484 | except queue.Empty: 485 | break 486 | try: 487 | self.poll_data['processes'].remove(stats['name']) 488 | except ValueError: 489 | pass 490 | self.collect_results(stats) 491 | 492 | if self.poll_data['processes']: 493 | LOGGER.warning('Did not receive results from %r', 494 | self.poll_data['processes']) 495 | 496 | def process(self, name, process_name): 497 | """Return the process handle for the given consumer name and process 498 | name. 
499 | 500 | :param str name: The consumer name from config 501 | :param str process_name: The automatically assigned process name 502 | :rtype: rejected.process.Process 503 | 504 | """ 505 | return self.consumers[name].processes[process_name] 506 | 507 | def process_count(self, name): 508 | """Return the process count for the given consumer name. 509 | 510 | :param str name: The consumer name 511 | :rtype: int 512 | 513 | """ 514 | return len(self.consumers[name].processes) 515 | 516 | def process_spawn_qty(self, name): 517 | """Return the number of processes to spawn for the given consumer name. 518 | 519 | :param str name: The consumer name 520 | :rtype: int 521 | 522 | """ 523 | return self.consumers[name].qty - self.process_count(name) 524 | 525 | def remove_consumer_process(self, consumer, name): 526 | """Remove all details for the specified consumer and process name. 527 | 528 | :param str consumer: The consumer name 529 | :param str name: The process name 530 | 531 | """ 532 | my_pid = os.getpid() 533 | if name in self.consumers[consumer].processes.keys(): 534 | try: 535 | child = self.consumers[consumer].processes[name] 536 | except KeyError: 537 | return 538 | try: 539 | alive = child.is_alive() 540 | except AssertionError: 541 | LOGGER.debug('Tried to test non-child process (%r to %r)', 542 | os.getpid(), child.pid) 543 | else: 544 | if child.pid == my_pid: 545 | LOGGER.debug('Child has my pid? %r, %r', my_pid, child.pid) 546 | elif alive: 547 | try: 548 | child.terminate() 549 | except OSError: 550 | pass 551 | try: 552 | del self.consumers[consumer].processes[name] 553 | except KeyError: 554 | pass 555 | 556 | def run(self): 557 | """When the consumer is ready to start running, kick off all of our 558 | consumer consumers and then loop while we process messages. 
559 | 560 | """ 561 | self.set_state(self.STATE_ACTIVE) 562 | self.setup_consumers() 563 | 564 | # Set the SIGCHLD handler for child creation errors 565 | signal.signal(signal.SIGCHLD, self.on_sigchld) 566 | 567 | # Set the SIGALRM handler for poll interval 568 | signal.signal(signal.SIGALRM, self.on_timer) 569 | 570 | # Kick off the poll timer 571 | signal.setitimer(signal.ITIMER_REAL, self.poll_interval, 0) 572 | 573 | # Loop for the lifetime of the app, pausing for a signal to pop up 574 | while self.is_running: 575 | if not self.is_sleeping: 576 | self.set_state(self.STATE_SLEEPING) 577 | signal.pause() 578 | 579 | # Note we're exiting run 580 | LOGGER.info('Exiting Master Control Program') 581 | 582 | @staticmethod 583 | def set_process_name(): 584 | """Set the process name for the top level process so that it shows up 585 | in logs in a more trackable fashion. 586 | 587 | """ 588 | proc = multiprocessing.current_process() 589 | for offset in range(0, len(sys.argv)): 590 | if sys.argv[offset] == '-c': 591 | name = sys.argv[offset + 1].split('/')[-1] 592 | proc.name = name.split('.')[0] 593 | break 594 | 595 | def set_timer(self, duration): 596 | """Setup the next alarm to fire and then wait for it to fire. 597 | 598 | :param int duration: How long to sleep 599 | 600 | """ 601 | # Make sure that the application is not shutting down before sleeping 602 | if self.is_shutting_down: 603 | LOGGER.debug('Not sleeping, application is trying to shutdown') 604 | return 605 | 606 | # Set the signal timer 607 | signal.setitimer(signal.ITIMER_REAL, duration, 0) 608 | 609 | def setup_consumers(self): 610 | """Iterate through each consumer in the configuration and kick off the 611 | minimal amount of processes, setting up the runtime data as well. 
612 | 613 | """ 614 | for name in self.consumer_cfg.keys(): 615 | self.consumers[name] = self.new_consumer(self.consumer_cfg[name], 616 | name) 617 | self.start_processes(name, self.consumers[name].qty) 618 | 619 | def start_process(self, name): 620 | """Start a new consumer process for the given consumer name 621 | 622 | :param str name: The consumer name 623 | 624 | """ 625 | process_name, proc = self.new_process(name) 626 | LOGGER.info('Spawning %s process for %s', process_name, name) 627 | 628 | # Append the process to the consumer process list 629 | self.consumers[name].processes[process_name] = proc 630 | 631 | # Start the process 632 | try: 633 | proc.start() 634 | except (OSError, IOError) as error: 635 | LOGGER.critical('Failed to start %s for %s: %r', 636 | process_name, name, error) 637 | try: 638 | del self.consumers[name].process[process_name] 639 | except AttributeError as error: 640 | LOGGER.warning('Could not cleanup consumer process: %s', error) 641 | 642 | def start_processes(self, name, quantity): 643 | """Start the specified quantity of consumer processes for the given 644 | consumer. 
645 | 646 | :param str name: The consumer name 647 | :param int quantity: The quantity of processes to start 648 | 649 | """ 650 | [self.start_process(name) for _i in range(0, quantity or 0)] 651 | 652 | def stop_processes(self): 653 | """Iterate through all of the consumer processes shutting them down.""" 654 | self.set_state(self.STATE_SHUTTING_DOWN) 655 | LOGGER.info('Stopping consumer processes') 656 | 657 | signal.signal(signal.SIGABRT, signal.SIG_IGN) 658 | signal.signal(signal.SIGALRM, signal.SIG_IGN) 659 | signal.signal(signal.SIGCHLD, signal.SIG_IGN) 660 | signal.signal(signal.SIGPROF, signal.SIG_IGN) 661 | signal.setitimer(signal.ITIMER_REAL, 0, 0) 662 | 663 | # Send SIGABRT 664 | LOGGER.info('Sending SIGABRT to active children') 665 | for proc in multiprocessing.active_children(): 666 | if int(proc.pid) != os.getpid(): 667 | try: 668 | os.kill(int(proc.pid), signal.SIGABRT) 669 | except OSError: 670 | pass 671 | 672 | # Wait for them to finish up to MAX_SHUTDOWN_WAIT 673 | for iteration in range(0, self.MAX_SHUTDOWN_WAIT): 674 | processes = len(self.active_processes(False)) 675 | if not processes: 676 | break 677 | 678 | LOGGER.info('Waiting on %i active processes to shut down (%i/%i)', 679 | processes, iteration, self.MAX_SHUTDOWN_WAIT) 680 | try: 681 | time.sleep(0.5) 682 | except KeyboardInterrupt: 683 | break 684 | 685 | if len(self.active_processes(False)): 686 | self.kill_processes() 687 | 688 | LOGGER.debug('All consumer processes stopped') 689 | self.set_state(self.STATE_STOPPED) 690 | 691 | @property 692 | def total_process_count(self): 693 | """Returns the active consumer process count 694 | 695 | :rtype: int 696 | 697 | """ 698 | return len(self.active_processes(False)) 699 | -------------------------------------------------------------------------------- /rejected/mixins.py: -------------------------------------------------------------------------------- 1 | import gc 2 | import logging 3 | 4 | LOGGER = logging.getLogger(__name__) 5 | 6 | 7 | 
class GarbageCollectorMixin(object):
    """Consumer mixin that invokes :func:`gc.collect` from
    :meth:`on_finish` every ``gc_collection_frequency`` messages
    (default: every 10,000).

    Configure the frequency with a ``gc_collection_frequency`` setting in
    the consumer configuration.

    """
    DEFAULT_GC_FREQUENCY = 10000

    def __init__(self, *args, **kwargs):
        settings = kwargs.get('settings', {})
        self._collection_cycle = settings.get(
            'gc_collection_frequency', self.DEFAULT_GC_FREQUENCY)
        super(GarbageCollectorMixin, self).__init__(*args, **kwargs)
        self._cycles_left = self.collection_cycle

    @property
    def collection_cycle(self):
        """Call :func:`gc.collect` every this many messages."""
        return self._collection_cycle

    @collection_cycle.setter
    def collection_cycle(self, value):
        """Set the number of messages to process before invoking ``gc.collect``

        :param int value: Cycle size

        """
        if value is None:
            return
        self._collection_cycle = value
        self._cycles_left = min(self._cycles_left, self._collection_cycle)

    def on_finish(self):
        """Count down the cycle and run a collection when it reaches zero."""
        super(GarbageCollectorMixin, self).on_finish()
        self._cycles_left -= 1
        if self._cycles_left <= 0:
            num_collected = gc.collect()
            self._cycles_left = self.collection_cycle
            LOGGER.debug('garbage collection run, %d objects evicted',
                         num_collected)


"""
Base State Tracking Class

"""
import logging
import time

LOGGER = logging.getLogger(__name__)


class State(object):
    """Class that is to be extended by MCP and process for maintaining the
    internal state of the application.

    """
    # State constants
    STATE_INITIALIZING = 0x01
    STATE_CONNECTING = 0x02
    STATE_IDLE = 0x03
    STATE_ACTIVE = 0x04
    STATE_SLEEPING = 0x05
    STATE_STOP_REQUESTED = 0x06
    STATE_SHUTTING_DOWN = 0x07
    STATE_STOPPED = 0x08

    # For reverse lookup
    STATES = {
        0x01: 'Initializing',
        0x02: 'Connecting',
        0x03: 'Idle',
        0x04: 'Active',
        0x05: 'Sleeping',
        0x06: 'Stop Requested',
        0x07: 'Shutting down',
        0x08: 'Stopped'
    }

    def __init__(self):
        """Initialize the state of the object"""
        self.state = self.STATE_INITIALIZING
        self.state_start = time.time()

    def set_state(self, new_state):
        """Assign the specified state to this consumer object.

        :param int new_state: The new state of the object
        :raises: ValueError

        """
        if new_state not in self.STATES:
            raise ValueError('Invalid state value: %r' % new_state)
        LOGGER.debug('State changing from %s to %s', self.STATES[self.state],
                     self.STATES[new_state])
        self.state = new_state
        self.state_start = time.time()

    @property
    def is_active(self):
        """Returns a bool specifying if the process is currently active.

        :rtype: bool

        """
        return self.state == self.STATE_ACTIVE

    @property
    def is_connecting(self):
        """Returns a bool specifying if the process is currently connecting.

        :rtype: bool

        """
        return self.state == self.STATE_CONNECTING

    @property
    def is_idle(self):
        """Returns a bool specifying if the process is currently idle.

        :rtype: bool

        """
        return self.state == self.STATE_IDLE

    @property
    def is_running(self):
        """Returns a bool determining if the process is in a running state or
        not

        :rtype: bool

        """
        return self.state in [self.STATE_IDLE, self.STATE_ACTIVE,
                              self.STATE_SLEEPING]

    @property
    def is_shutting_down(self):
        """Designates if the process is shutting down.

        :rtype: bool

        """
        return self.state == self.STATE_SHUTTING_DOWN

    @property
    def is_sleeping(self):
        """Returns a bool determining if the process is sleeping

        :rtype: bool

        """
        return self.state == self.STATE_SLEEPING

    @property
    def is_stopped(self):
        """Returns a bool determining if the process is stopped or stopping

        :rtype: bool

        """
        return self.state == self.STATE_STOPPED

    @property
    def is_waiting_to_shutdown(self):
        """Designates if the process is waiting to start shutdown

        :rtype: bool

        """
        return self.state == self.STATE_STOP_REQUESTED

    @property
    def state_description(self):
        """Return the string description of our running state.

        :rtype: str

        """
        return self.STATES[self.state]

    @property
    def time_in_state(self):
        """Return the time that has been spent in the current state.

        :rtype: float

        """
        return time.time() - self.state_start
import logging
import os
import socket

LOGGER = logging.getLogger(__name__)


class Client(object):
    """A simple statsd client that buffers counters to emit fewer UDP packets
    than once per incr.

    """
    DEFAULT_HOST = 'localhost'
    DEFAULT_PORT = 8125
    DEFAULT_PREFIX = 'rejected'
    PAYLOAD_HOSTNAME = '{}.{}.{}.{}:{}|{}\n'
    PAYLOAD_NO_HOSTNAME = '{}.{}.{}:{}|{}\n'

    def __init__(self, consumer_name, settings, failure_callback):
        """
        :param str consumer_name: The name of the consumer for this client
        :param dict settings: statsd Settings
        :param callable failure_callback: Invoked when a metric can not be
            delivered and the client was previously connected

        """
        self._connected = False
        self._consumer_name = consumer_name
        self._failure_callback = failure_callback
        # Short hostname only, so the metric path does not include the
        # full domain name segments
        self._hostname = socket.gethostname().split('.')[0]
        self._settings_in = settings
        self._settings = {}

        self._address = (self._setting('host', self.DEFAULT_HOST),
                         int(self._setting('port', self.DEFAULT_PORT)))
        self._prefix = self._setting('prefix', self.DEFAULT_PREFIX)
        self._tcp_sock, self._udp_sock = None, None
        if self._setting('tcp', False):
            self._tcp_sock = self._tcp_socket()
        else:
            self._udp_sock = self._udp_socket()

    def add_timing(self, key, value=0):
        """Add a timer value to statsd for the specified key

        :param str key: The key to add the timing to
        :param int or float value: The value of the timing in seconds

        """
        # statsd expects timings in milliseconds
        return self._send(key, value * 1000, 'ms')

    def incr(self, key, value=1):
        """Increment the counter value in statsd

        :param str key: The key to increment
        :param int value: The value to increment by, defaults to 1

        """
        return self._send(key, value, 'c')

    def set_gauge(self, key, value):
        """Set a gauge value in statsd

        :param str key: The key to set the value for
        :param int or float value: The value to set

        """
        return self._send(key, value, 'g')

    def stop(self):
        """Close the open socket, whether connected via TCP or UDP.

        Bug fix: the original implementation only closed the TCP stream,
        leaking the UDP socket's file descriptor on shutdown.

        """
        if self._tcp_sock:
            self._tcp_sock.close()
        if self._udp_sock:
            self._udp_sock.close()

    def _build_payload(self, key, value, metric_type):
        """Return the formatted statsd payload string for the metric.

        :param str key: The metric key
        :param int or float value: The metric value
        :param str metric_type: The statsd metric type (``c``, ``g``, ``ms``)
        :rtype: str

        """
        if self._setting('include_hostname', True):
            return self.PAYLOAD_HOSTNAME.format(
                self._prefix, self._hostname, self._consumer_name, key, value,
                metric_type)
        return self.PAYLOAD_NO_HOSTNAME.format(
            self._prefix, self._consumer_name, key, value,
            metric_type)

    def _send(self, key, value, metric_type):
        """Send the specified value to the statsd daemon via UDP without a
        direct socket connection.

        :param str key: The key name to send
        :param int or float value: The value for the key

        """
        payload = self._build_payload(key, value, metric_type)
        LOGGER.debug('Sending statsd payload: %r', payload)
        try:
            if self._tcp_sock:
                return self._tcp_sock.write(payload.encode('utf-8'))
            else:
                self._udp_sock.sendto(payload.encode('utf-8'), self._address)
        except (OSError, socket.error) as error:  # pragma: nocover
            if self._connected:
                LOGGER.exception('Error sending statsd metric: %s', error)
                self._connected = False
                self._failure_callback()

    def _setting(self, key, default):
        """Return the setting, checking config, then the appropriate
        environment variable, falling back to the default, caching the
        results.

        :param str key: The key to get
        :param any default: The default value if not set
        :return: str

        """
        if key not in self._settings:
            value = self._settings_in.get(
                key, os.environ.get('STATSD_{}'.format(key).upper(), default))
            self._settings[key] = value
        return self._settings[key]

    def _tcp_on_closed(self):
        """Invoked when the socket is closed."""
        LOGGER.warning('Disconnected from statsd, reconnecting')
        self._connected = False
        self._tcp_sock = self._tcp_socket()

    def _tcp_on_connected(self):
        """Invoked when the IOStream is connected"""
        LOGGER.debug('Connected to statsd at %s via TCP', self._address)
        self._connected = True

    def _tcp_socket(self):
        """Connect to statsd via TCP and return the IOStream handle.

        :rtype: tornado.iostream.IOStream

        """
        # Imported lazily so UDP-only deployments do not require tornado
        # to be importable just to use this module.
        from tornado import iostream
        sock = iostream.IOStream(socket.socket(
            socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP))
        sock.set_close_callback(self._tcp_on_closed)
        try:
            sock.connect(self._address, self._tcp_on_connected)
        except (OSError, socket.error) as error:
            LOGGER.error('Failed to connect via TCP, triggering shutdown: %s',
                         error)
            self._failure_callback()
        else:
            self._connected = True
        return sock

    @staticmethod
    def _udp_socket():
        """Return the UDP socket handle

        :rtype: socket.socket

        """
        return socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                             socket.IPPROTO_UDP)
The test cases exposes multiple 4 | methods to make it easy to setup a consumer and process messages. It is 5 | build on top of :class:`tornado.testing.AsyncTestCase` which extends 6 | :class:`unittest.TestCase`. 7 | 8 | To get started, override the 9 | :meth:`rejected.testing.AsyncTestCase.get_consumer` method. 10 | 11 | Next, the :meth:`rejected.testing.AsyncTestCase.get_settings` method can be 12 | overridden to define the settings that are passed into the consumer. 13 | 14 | Finally, to invoke your Consumer as if it were receiving a message, the 15 | :meth:`~rejected.testing.AsyncTestCase.process_message` method should be 16 | invoked. 17 | 18 | .. note:: Tests are asynchronous, so each test should be decorated with 19 | :meth:`~rejected.testing.gen_test`. 20 | 21 | Example 22 | ------- 23 | The following example expects that when the message is processed by the 24 | consumer, the consumer will raise a :exc:`~rejected.consumer.MessageException`. 25 | 26 | .. code:: python 27 | 28 | from rejected import consumer, testing 29 | 30 | import my_package 31 | 32 | 33 | class ConsumerTestCase(testing.AsyncTestCase): 34 | 35 | def get_consumer(self): 36 | return my_package.Consumer 37 | 38 | def get_settings(self): 39 | return {'remote_url': 'http://foo'} 40 | 41 | @testing.gen_test 42 | def test_consumer_raises_message_exception(self): 43 | with self.assertRaises(consumer.MessageException): 44 | yield self.process_message({'foo': 'bar'}) 45 | 46 | """ 47 | import json 48 | import logging 49 | import time 50 | import uuid 51 | try: 52 | from unittest import mock 53 | except ImportError: 54 | import mock 55 | 56 | from helper import config 57 | from pika import channel, spec 58 | from pika.adapters import tornado_connection 59 | from tornado import gen, ioloop, testing 60 | 61 | try: 62 | import raven 63 | except ImportError: 64 | raven = None 65 | 66 | from . 
from . import consumer, data, process

LOGGER = logging.getLogger(__name__)

gen_test = testing.gen_test
"""Testing equivalent of :func:`tornado.gen.coroutine`, to be applied to test
methods.

"""


class AsyncTestCase(testing.AsyncTestCase):
    """:class:`tornado.testing.AsyncTestCase` subclass for testing
    :class:`~rejected.consumer.Consumer` classes.

    """
    _consumer = None

    def setUp(self):
        """Create the mocked process, the consumer under test, and the
        mock channel that it will publish through.

        """
        super(AsyncTestCase, self).setUp()
        self.correlation_id = str(uuid.uuid4())
        self.process = self._create_process()
        self.consumer = self._create_consumer()
        self.channel = self.process.connections['mock'].channel
        self.exc_info = None

    def tearDown(self):
        super(AsyncTestCase, self).tearDown()
        # Guard with getattr so that a failure inside setUp() (before
        # self.consumer was assigned) is not masked by an AttributeError
        # raised here during cleanup.
        test_consumer = getattr(self, 'consumer', None)
        if test_consumer is not None and not test_consumer._finished:
            test_consumer.finish()

    @property
    def published_messages(self):
        """Return a list of :class:`~rejected.testing.PublishedMessage`
        that are extracted from all calls to
        :meth:`~pika.channel.Channel.basic_publish` that are invoked during
        the test. The properties attribute is the
        :class:`pika.spec.BasicProperties`
        instance that was created during publishing.

        .. versionadded:: 3.18.9

        :returns: list([:class:`~rejected.testing.PublishedMessage`])

        """
        # mock_calls entries are (name, args, kwargs) tuples; the consumer
        # invokes basic_publish with keyword arguments, so c[2] is used.
        return [
            PublishedMessage(
                body=c[2]['body'],
                exchange=c[2]['exchange'],
                properties=c[2]['properties'],
                routing_key=c[2]['routing_key'])
            for c in self.channel.basic_publish.mock_calls
        ]

    def get_consumer(self):
        """Override to return the consumer class for testing.

        :rtype: :class:`rejected.consumer.Consumer`

        """
        return consumer.Consumer

    def get_settings(self):
        """Override this method to provide settings to the consumer during
        construction. These settings should be from the `config` stanza
        of the Consumer configuration.

        :rtype: dict

        """
        return {}

    def create_message(self, message, properties=None,
                       exchange='rejected', routing_key='test'):
        """Create a message instance for use with the consumer in testing.

        :param any message: the body of the message to create
        :param dict properties: AMQP message properties
        :param str exchange: The exchange the message should appear to be from
        :param str routing_key: The message's routing key
        :rtype: :class:`rejected.data.Message`

        """
        if not properties:
            properties = {}
        # dict bodies are serialized automatically when the content type
        # says the payload is JSON
        if isinstance(message, dict) and \
                properties.get('content_type') == 'application/json':
            message = json.dumps(message)
        return data.Message(
            connection='mock',
            channel=self.process.connections['mock'].channel,
            method=spec.Basic.Deliver(
                'ctag0', 1, False, exchange, routing_key),
            properties=spec.BasicProperties(
                app_id=properties.get('app_id', 'rejected.testing'),
                content_encoding=properties.get('content_encoding'),
                content_type=properties.get('content_type'),
                correlation_id=properties.get(
                    'correlation_id', self.correlation_id),
                delivery_mode=properties.get('delivery_mode', 1),
                expiration=properties.get('expiration'),
                headers=properties.get('headers'),
                message_id=properties.get('message_id', str(uuid.uuid4())),
                priority=properties.get('priority'),
                reply_to=properties.get('reply_to'),
                timestamp=properties.get('timestamp', int(time.time())),
                type=properties.get('type'),
                user_id=properties.get('user_id')
            ), body=message, returned=False)

    def log_exception(self, msg_format, *args, exc_info):
        """Customize the logging of uncaught exceptions.

        :param str msg_format: format of msg to log with ``self.logger.error``
        :param args: positional arguments to pass to ``self.logger.error``
        :param exc_info: The exc_info for the exception

        This for internal use and should not be extended or used directly.

        By default, this method will log the message using
        :meth:`logging.Logger.error` and record the exc_info so that
        :meth:`process_message` can re-raise the unhandled exception.

        """
        # Positional args go before the keyword argument; the original
        # ``LOGGER.exception(msg_format, exc_info=exc_info, *args)`` relied
        # on the confusing *args-after-keyword call form.
        LOGGER.error(msg_format, *args, exc_info=exc_info)
        self.exc_info = exc_info

    @property
    def measurement(self):
        """Return the :py:class:`rejected.data.Measurement` for the currently
        assigned measurement object to the consumer.

        :rtype: :class:`rejected.data.Measurement`

        """
        return self.consumer._measurement

    @gen.coroutine
    def process_message(self,
                        message_body=None,
                        content_type='application/json',
                        message_type=None,
                        properties=None,
                        exchange='rejected',
                        routing_key='routing-key'):
        """Process a message as if it were being delivered by RabbitMQ. When
        invoked, an AMQP message will be locally created and passed into the
        consumer. With using the default values for the method, if you pass in
        a JSON serializable object, the message body will automatically be
        JSON serialized.

        If an exception is not raised, a :class:`~rejected.data.Measurement`
        instance is returned that will contain all of the measurements
        collected during the processing of the message.

        Example:

        .. code:: python

            class ConsumerTestCase(testing.AsyncTestCase):

                @testing.gen_test
                def test_consumer_raises_message_exception(self):
                    with self.assertRaises(consumer.MessageException):
                        result = yield self.process_message({'foo': 'bar'})


        .. note:: This method is a co-routine and must be yielded to ensure
                  that your tests are functioning properly.

        :param any message_body: the body of the message to create
        :param str content_type: The mime type
        :param str message_type: identifies the type of message to create
        :param dict properties: AMQP message properties
        :param str exchange: The exchange the message should appear to be from
        :param str routing_key: The message's routing key
        :raises: :exc:`rejected.consumer.ConsumerException`
        :raises: :exc:`rejected.consumer.MessageException`
        :raises: :exc:`rejected.consumer.ProcessingException`
        :rtype: :class:`rejected.data.Measurement`

        """
        properties = properties or {}
        properties.setdefault('content_type', content_type)
        properties.setdefault('correlation_id', self.correlation_id)
        properties.setdefault('timestamp', int(time.time()))
        properties.setdefault('type', message_type)

        measurement = data.Measurement()

        # Route the consumer's exception logging through this test case so
        # unhandled exceptions can be re-raised below.
        self.consumer.log_exception = self.log_exception
        result = yield self.consumer.execute(
            self.create_message(message_body, properties,
                                exchange, routing_key),
            measurement)
        # Translate the consumer's sentinel results back into the
        # exceptions the test is expected to assert against.
        if result == data.CONSUMER_EXCEPTION:
            raise consumer.ConsumerException()
        elif result == data.MESSAGE_EXCEPTION:
            raise consumer.MessageException()
        elif result == data.PROCESSING_EXCEPTION:
            raise consumer.ProcessingException()
        elif result == data.UNHANDLED_EXCEPTION:
            if self.exc_info:
                raise self.exc_info[1]
            raise AssertionError('UNHANDLED_EXCEPTION')
        raise gen.Return(measurement)

    @staticmethod
    def _create_channel():
        """Return a mock pika channel."""
        return mock.Mock(spec=channel.Channel)

    def _create_connection(self):
        """Return a mock Tornado connection wired to a mock channel."""
        obj = mock.Mock(spec=tornado_connection.TornadoConnection)
        obj.ioloop = ioloop.IOLoop.current()
        obj.channel = self._create_channel()
        obj.channel.connection = obj
        return obj

    def _create_consumer(self):
        """Creates the per-test instance of the consumer that is going to be
        tested.

        :rtype: rejected.consumer.Consumer

        """
        cls = self.get_consumer()
        obj = cls(config.Data(self.get_settings()), self.process)
        obj._message = self.create_message('dummy')
        obj.set_channel('mock', self.process.connections['mock'].channel)
        return obj

    def _create_process(self):
        """Return a mock Process with a single mock 'mock' connection."""
        obj = mock.Mock(spec=process.Process)
        obj.connections = {'mock': self._create_connection()}
        # Sentry support is optional; only mock the client when raven is
        # importable
        obj.sentry_client = mock.Mock(spec=raven.Client) if raven else None
        return obj
class PublishedMessage(object):
    """Contains information about messages published during a test when
    using :class:`rejected.testing.AsyncTestCase`.

    :param str exchange: The exchange the message was published to
    :param str routing_key: The routing key the message was published with
    :param pika.spec.BasicProperties properties: AMQP message properties
    :param bytes body: AMQP message body

    .. versionadded:: 3.18.9

    """
    __slots__ = ['exchange', 'routing_key', 'properties', 'body']

    def __init__(self, exchange, routing_key, properties, body):
        """Create a new instance of the object.

        :param str exchange: The exchange the message was published to
        :param str routing_key: The routing key the message was published
            with
        :param pika.spec.BasicProperties properties: AMQP message properties
        :param bytes body: AMQP message body

        """
        self.exchange = exchange
        self.routing_key = routing_key
        self.properties = properties
        self.body = body

    def __repr__(self):
        """Return the string representation of the object.

        :rtype: str

        """
        # Bug fix: the format template was previously an empty string
        # (``''.format(...)``), so repr() always returned ''.
        return '<PublishedMessage exchange={} routing_key={}>'.format(
            self.exchange, self.routing_key)


# ---------------------------------------------------------------------------
# rejected/utils.py
# ---------------------------------------------------------------------------
import importlib
import math


def get_package_version(module_obj, value):
    """Get the version of a package or a module's package.

    Checks the module's own ``version``/``__version__`` attributes first,
    then falls back to looking up the installed distribution for each
    dotted-path prefix of ``value``.

    :param object module_obj: The module that was imported for the consumer
    :param str value: The namespaced module path or package name
    :rtype: str or None

    """
    for key in ('version', '__version__'):
        if hasattr(module_obj, key):
            return getattr(module_obj, key)
    # Imported lazily so this module does not hard-require
    # setuptools/pkg_resources unless distribution lookup is needed.
    import pkg_resources
    parts = value.split('.')
    for index in range(len(parts)):
        try:
            return pkg_resources.get_distribution(
                '.'.join(parts[:index + 1])).version
        except (pkg_resources.DistributionNotFound,
                pkg_resources.RequirementParseError):
            continue
def import_consumer(value):
    """Pass in a string in the format of foo.Bar, foo.bar.Baz, foo.bar.baz.Qux
    and it will return a handle to the class, and the version.

    :param str value: The consumer class in module.Consumer format
    :return: tuple(Class, str)

    """
    parts = value.split('.')
    # Everything before the last dot is the module path; the last part is
    # the class name within that module.
    module_obj = importlib.import_module('.'.join(parts[:-1]))
    return (getattr(module_obj, parts[-1]),
            get_package_version(module_obj, value))


def percentile(values, k):
    """Find the percentile of a list of values (nearest-rank method).

    Bug fix: the original implementation sorted ``values`` in place,
    mutating the caller's list as a side effect; a sorted copy is now
    used instead.

    :param list values: The list of values to find the percentile of
    :param int k: The percentile to find
    :rtype: float or int or None

    """
    if not values:
        return None
    ordered = sorted(values)
    index = (len(ordered) * (float(k) / 100)) - 1
    return ordered[int(math.ceil(index))]
# coding=utf-8
"""Package metadata and requirements handling for rejected."""
from os import path

import setuptools


def read_requirements(name):
    """Read in the requirements file and return the list of things to return.

    Handles ``#`` comments, ``-r other-file`` includes (resolved
    recursively, relative to the ``requires`` directory), and skips any
    other pip directives (lines starting with ``-``).

    :param str name: The file to return
    :rtype: list

    """
    requirements = []
    with open(path.join('requires', name)) as req_file:
        for line in req_file:
            if '#' in line:
                line = line[:line.index('#')]
            line = line.strip()
            if line.startswith('-r'):
                requirements.extend(read_requirements(line[2:].strip()))
            elif line and not line.startswith('-'):
                requirements.append(line)
    return requirements


# Read the long description with a context manager so the file handle is
# closed deterministically (the original ``open(...).read()`` leaked it).
with open('README.rst') as readme:
    LONG_DESCRIPTION = readme.read()

setuptools.setup(
    name='rejected',
    version='3.23.1',
    description='Rejected is a Python RabbitMQ Consumer Framework and '
                'Controller Daemon',
    long_description=LONG_DESCRIPTION,
    # Tell PyPI how to render the long description (it is reStructuredText)
    long_description_content_type='text/x-rst',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'License :: OSI Approved :: BSD License'
    ],
    keywords='amqp rabbitmq',
    author='Gavin M. Roy',
    author_email='gavinmroy@gmail.com',
    url='https://github.com/gmr/rejected',
    license='BSD',
    packages=['rejected'],
    package_data={'': ['LICENSE', 'README.rst']},
    include_package_data=True,
    install_requires=read_requirements('installation.txt'),
    extras_require={
        'html': ['beautifulsoup4'],
        'influxdb': ['sprockets-influxdb'],
        'msgpack': ['u-msgpack-python'],
        'sentry': ['raven'],
        'testing': read_requirements('development.txt')
    },
    tests_require=read_requirements('testing.txt'),
    entry_points={'console_scripts': ['rejected=rejected.controller:main']},
    zip_safe=True)
type='test', 27 | user_id='foo', 28 | app_id='bar') 29 | BODY = '{"qux": true, "foo": "bar", "baz": 1}' 30 | 31 | 32 | class MockConsumer(object): 33 | 34 | def __init__(self, configuration, process): 35 | """Creates a new instance of a Mock Consumer class. To perform 36 | initialization tasks, extend Consumer._initialize 37 | :param dict configuration: The configuration from rejected 38 | """ 39 | # Carry the configuration for use elsewhere 40 | self._configuration = configuration 41 | self._process = process 42 | -------------------------------------------------------------------------------- /tests/test_consumer.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | """Tests for rejected.consumer""" 3 | import json 4 | import unittest 5 | try: 6 | from unittest import mock 7 | except ImportError: 8 | import mock 9 | 10 | from rejected import consumer, data 11 | from tornado import gen, testing 12 | 13 | from . import mocks 14 | 15 | 16 | class ConsumerInitializationTests(unittest.TestCase): 17 | 18 | def test_configuration_is_assigned(self): 19 | cfg = {'foo': 'bar'} 20 | obj = consumer.Consumer(cfg, None) 21 | self.assertDictEqual(obj._settings, cfg) 22 | 23 | def test_channel_is_none(self): 24 | obj = consumer.Consumer({}, None) 25 | self.assertIsNone(obj._channel) 26 | 27 | def test_message_is_none(self): 28 | obj = consumer.Consumer({}, None) 29 | self.assertIsNone(obj._message) 30 | 31 | def test_initialize_is_invoked(self): 32 | with mock.patch('rejected.consumer.Consumer.initialize') as init: 33 | consumer.Consumer({}, None) 34 | init.assert_called_once_with() 35 | 36 | 37 | class ConsumerDefaultProcessTests(unittest.TestCase): 38 | 39 | def test_process_raises_exception(self): 40 | obj = consumer.Consumer({}, None) 41 | self.assertRaises(NotImplementedError, obj.process) 42 | 43 | 44 | class ConsumerSetChannelTests(unittest.TestCase): 45 | 46 | def test_set_channel_assigns_to_channel(self): 47 | obj = 
class TestConsumer(consumer.Consumer):
    """Minimal concrete consumer whose process() is a no-op."""

    def process(self):
        pass


class ConsumerReceiveTests(testing.AsyncTestCase):
    """Exercise Consumer.execute() with a mocked delivery."""

    def setUp(self):
        super(ConsumerReceiveTests, self).setUp()
        self.obj = TestConsumer({}, None)
        self.message = data.Message('mock', mocks.CHANNEL, mocks.METHOD,
                                    mocks.PROPERTIES, mocks.BODY, False)
        self.measurement = data.Measurement()

    @testing.gen_test
    def test_receive_assigns_message(self):
        yield self.obj.execute(self.message, self.measurement)
        self.assertEqual(self.obj._message, self.message)

    @testing.gen_test
    def test_receive_invokes_process(self):
        with mock.patch.object(self.obj, 'process') as process:
            yield self.obj.execute(self.message, self.measurement)
            process.assert_called_once_with()

    @testing.gen_test
    def test_receive_drops_invalid_message_type(self):
        obj = TestConsumer({}, None,
                           drop_invalid_messages=True,
                           message_type='foo')
        with mock.patch.object(obj, 'process') as process:
            # Bug fix: the original executed ``self.obj`` (the default
            # consumer from setUp) here, so the assertion below was
            # vacuously true -- ``obj`` was never exercised at all.
            yield obj.execute(self.message, self.measurement)
            process.assert_not_called()

    @testing.gen_test
    def test_raises_with_drop(self):
        obj = TestConsumer({}, None,
                           drop_invalid_messages=True,
                           message_type='foo')
        result = yield obj.execute(self.message, self.measurement)
        self.assertEqual(result, data.MESSAGE_DROP)
run_consumer(self): 107 | self.obj = TestConsumer(self.config, None) 108 | yield self.obj.execute(self.message, self.measurement) 109 | 110 | @testing.gen_test 111 | def test_app_id_property(self): 112 | yield self.run_consumer() 113 | self.assertEqual(self.obj.app_id, mocks.PROPERTIES.app_id) 114 | 115 | @testing.gen_test 116 | def test_body_property(self): 117 | yield self.run_consumer() 118 | self.assertEqual(self.obj.body, mocks.BODY) 119 | 120 | @testing.gen_test 121 | def test_settings_property(self): 122 | yield self.run_consumer() 123 | self.assertDictEqual(self.obj.settings, self.config) 124 | 125 | @testing.gen_test 126 | def test_content_encoding_property(self): 127 | yield self.run_consumer() 128 | self.assertEqual(self.obj.content_encoding, 129 | mocks.PROPERTIES.content_encoding) 130 | 131 | @testing.gen_test 132 | def test_content_type_property(self): 133 | yield self.run_consumer() 134 | self.assertEqual(self.obj.content_type, mocks.PROPERTIES.content_type) 135 | 136 | @testing.gen_test 137 | def test_correlation_id_property(self): 138 | yield self.run_consumer() 139 | self.assertEqual(self.obj.correlation_id, 140 | mocks.PROPERTIES.correlation_id) 141 | 142 | @testing.gen_test 143 | def test_exchange_property(self): 144 | yield self.run_consumer() 145 | self.assertEqual(self.obj.exchange, mocks.METHOD.exchange) 146 | 147 | @testing.gen_test 148 | def test_expiration_property(self): 149 | yield self.run_consumer() 150 | self.assertEqual(self.obj.expiration, mocks.PROPERTIES.expiration) 151 | 152 | @testing.gen_test 153 | def test_headers_property(self): 154 | yield self.run_consumer() 155 | self.assertDictEqual(self.obj.headers, mocks.PROPERTIES.headers) 156 | 157 | @testing.gen_test 158 | def test_message_id_property(self): 159 | yield self.run_consumer() 160 | self.assertEqual(self.obj.message_id, mocks.PROPERTIES.message_id) 161 | 162 | @testing.gen_test 163 | def test_name_property(self): 164 | yield self.run_consumer() 165 | 
self.assertEqual(self.obj.name, self.obj.__class__.__name__) 166 | 167 | @testing.gen_test 168 | def test_priority_property(self): 169 | yield self.run_consumer() 170 | self.assertEqual(self.obj.priority, mocks.PROPERTIES.priority) 171 | 172 | @testing.gen_test 173 | def test_properties_property(self): 174 | yield self.run_consumer() 175 | self.assertDictEqual(self.obj.properties, 176 | dict(data.Properties(mocks.PROPERTIES))) 177 | 178 | @testing.gen_test 179 | def test_redelivered_property(self): 180 | yield self.run_consumer() 181 | self.assertEqual(self.obj.redelivered, mocks.METHOD.redelivered) 182 | 183 | @testing.gen_test 184 | def test_reply_to_property(self): 185 | yield self.run_consumer() 186 | self.assertEqual(self.obj.reply_to, mocks.PROPERTIES.reply_to) 187 | 188 | @testing.gen_test 189 | def test_routing_key_property(self): 190 | yield self.run_consumer() 191 | self.assertEqual(self.obj.routing_key, mocks.METHOD.routing_key) 192 | 193 | @testing.gen_test 194 | def test_message_type_property(self): 195 | yield self.run_consumer() 196 | self.assertEqual(self.obj.message_type, mocks.PROPERTIES.type) 197 | 198 | @testing.gen_test 199 | def test_timestamp_property(self): 200 | yield self.run_consumer() 201 | self.assertEqual(self.obj.timestamp, mocks.PROPERTIES.timestamp) 202 | 203 | @testing.gen_test 204 | def test_user_id_property(self): 205 | yield self.run_consumer() 206 | self.assertEqual(self.obj.user_id, mocks.PROPERTIES.user_id) 207 | 208 | 209 | class TestSmartConsumer(consumer.SmartConsumer): 210 | def process(self): 211 | pass 212 | 213 | 214 | class TestSmartConsumerWithJSON(testing.AsyncTestCase): 215 | 216 | def setUp(self): 217 | super(TestSmartConsumerWithJSON, self).setUp() 218 | self.body = {'foo': 'bar', 'baz': 1, 'qux': True} 219 | self.message = data.Message('mock', mocks.CHANNEL, mocks.METHOD, 220 | mocks.PROPERTIES, json.dumps(self.body), 221 | False) 222 | self.measurement = data.Measurement() 223 | 224 | @testing.gen_test 225 | 
def test_message_body_property(self): 226 | self.obj = TestSmartConsumer({}, None) 227 | self.obj.execute(self.message, self.measurement) 228 | self.assertDictEqual(self.obj.body, self.body) 229 | -------------------------------------------------------------------------------- /tests/test_data.py: -------------------------------------------------------------------------------- 1 | """Tests for rejected.data""" 2 | import unittest 3 | 4 | from rejected import data 5 | 6 | from . import mocks 7 | 8 | 9 | class TestProperties(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self._obj = data.Properties(mocks.PROPERTIES) 13 | 14 | def test_app_id(self): 15 | self.assertEqual(self._obj.app_id, mocks.PROPERTIES.app_id) 16 | 17 | def test_content_encoding(self): 18 | self.assertEqual(self._obj.content_encoding, 19 | mocks.PROPERTIES.content_encoding) 20 | 21 | def test_content_type(self): 22 | self.assertEqual(self._obj.content_type, mocks.PROPERTIES.content_type) 23 | 24 | def test_correlation_id(self): 25 | self.assertEqual(self._obj.correlation_id, 26 | mocks.PROPERTIES.correlation_id) 27 | 28 | def test_delivery_mode(self): 29 | self.assertEqual(self._obj.delivery_mode, 30 | mocks.PROPERTIES.delivery_mode) 31 | 32 | def test_expiration(self): 33 | self.assertEqual(self._obj.expiration, mocks.PROPERTIES.expiration) 34 | 35 | def test_message_id(self): 36 | self.assertEqual(self._obj.message_id, mocks.PROPERTIES.message_id) 37 | 38 | def test_priority(self): 39 | self.assertEqual(self._obj.priority, mocks.PROPERTIES.priority) 40 | 41 | def test_reply_to(self): 42 | self.assertEqual(self._obj.reply_to, mocks.PROPERTIES.reply_to) 43 | 44 | def test_timestamp(self): 45 | self.assertEqual(self._obj.timestamp, mocks.PROPERTIES.timestamp) 46 | 47 | def test_type(self): 48 | self.assertEqual(self._obj.type, mocks.PROPERTIES.type) 49 | 50 | def test_user_id(self): 51 | self.assertEqual(self._obj.user_id, mocks.PROPERTIES.user_id) 52 | 53 | 54 | class 
TestMessage(unittest.TestCase): 55 | 56 | def setUp(self): 57 | self._obj = data.Message( 58 | 'mock', mocks.CHANNEL, mocks.METHOD, mocks.PROPERTIES, mocks.BODY, 59 | False) 60 | 61 | def test_body(self): 62 | self.assertEqual(self._obj.body, mocks.BODY) 63 | 64 | def test_channel(self): 65 | self.assertEqual(self._obj.channel, mocks.CHANNEL) 66 | 67 | def test_consumer_tag(self): 68 | self.assertEqual(self._obj.consumer_tag, mocks.METHOD.consumer_tag) 69 | 70 | def test_delivery_tag(self): 71 | self.assertEqual(self._obj.delivery_tag, mocks.METHOD.delivery_tag) 72 | 73 | def test_exchange(self): 74 | self.assertEqual(self._obj.exchange, mocks.METHOD.exchange) 75 | 76 | def test_method(self): 77 | self.assertEqual(self._obj.method, mocks.METHOD) 78 | 79 | def test_redelivered(self): 80 | self.assertEqual(self._obj.redelivered, mocks.METHOD.redelivered) 81 | 82 | def test_routing_key(self): 83 | self.assertEqual(self._obj.routing_key, mocks.METHOD.routing_key) 84 | 85 | def test_app_id(self): 86 | self.assertEqual(self._obj.properties.app_id, mocks.PROPERTIES.app_id) 87 | 88 | def test_content_encoding(self): 89 | self.assertEqual(self._obj.properties.content_encoding, 90 | mocks.PROPERTIES.content_encoding) 91 | 92 | def test_content_type(self): 93 | self.assertEqual(self._obj.properties.content_type, 94 | mocks.PROPERTIES.content_type) 95 | 96 | def test_correlation_id(self): 97 | self.assertEqual(self._obj.properties.correlation_id, 98 | mocks.PROPERTIES.correlation_id) 99 | 100 | def test_delivery_mode(self): 101 | self.assertEqual(self._obj.properties.delivery_mode, 102 | mocks.PROPERTIES.delivery_mode) 103 | 104 | def test_expiration(self): 105 | self.assertEqual(self._obj.properties.expiration, 106 | mocks.PROPERTIES.expiration) 107 | 108 | def test_message_id(self): 109 | self.assertEqual(self._obj.properties.message_id, 110 | mocks.PROPERTIES.message_id) 111 | 112 | def test_priority(self): 113 | self.assertEqual(self._obj.properties.priority, 114 | 
mocks.PROPERTIES.priority) 115 | 116 | def test_reply_to(self): 117 | self.assertEqual(self._obj.properties.reply_to, 118 | mocks.PROPERTIES.reply_to) 119 | 120 | def test_timestamp(self): 121 | self.assertEqual(self._obj.properties.timestamp, 122 | mocks.PROPERTIES.timestamp) 123 | 124 | def test_type(self): 125 | self.assertEqual(self._obj.properties.type, 126 | mocks.PROPERTIES.type) 127 | 128 | def test_user_id(self): 129 | self.assertEqual(self._obj.properties.user_id, 130 | mocks.PROPERTIES.user_id) 131 | -------------------------------------------------------------------------------- /tests/test_mcp.py: -------------------------------------------------------------------------------- 1 | """Tests for the MCP""" 2 | import multiprocessing 3 | 4 | try: 5 | from unittest import mock 6 | except ImportError: 7 | import mock 8 | 9 | from helper import config 10 | from rejected import mcp 11 | 12 | from . import test_state 13 | 14 | 15 | class TestMCP(test_state.TestState): 16 | 17 | CONFIG = {'poll_interval': 30.0, 'log_stats': True, 'Consumers': {}} 18 | 19 | @mock.patch.object(multiprocessing, 'Queue') 20 | def setUp(self, _mock_queue_unused): 21 | self.cfg = config.Config() 22 | self.cfg.application.update(self.CONFIG) 23 | self._obj = mcp.MasterControlProgram(self.cfg) 24 | 25 | def test_mcp_init_consumers_dict(self): 26 | self.assertIsInstance(self._obj.consumers, dict) 27 | 28 | def test_mcp_init_consumers_dict_empty(self): 29 | self.assertTrue(not self._obj.consumers, dict) 30 | 31 | def test_mcp_init_queue_initialized(self): 32 | self.assertIsInstance(self._obj.stats_queue, mock.MagicMock) 33 | -------------------------------------------------------------------------------- /tests/test_percentile.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from rejected import utils 4 | 5 | 6 | class PercentileTestCase(unittest.TestCase): 7 | 8 | def test_90th_percentile(self): 9 | values = [43, 54, 56, 
61, 62, 66, 68, 69, 69, 70, 71, 72, 77, 78, 10 | 79, 85, 87, 88, 89, 93, 95, 96, 98, 99, 99] 11 | self.assertEqual(utils.percentile(values, 90), 98) 12 | 13 | def test_50th_percentile(self): 14 | values = [43, 54, 56, 61, 62, 66, 68, 69, 69, 70, 71, 72, 77, 78, 15 | 79, 85, 87, 88, 89, 93, 95, 96, 98, 99, 99] 16 | self.assertEqual(utils.percentile(values, 50), 77) 17 | -------------------------------------------------------------------------------- /tests/test_process.py: -------------------------------------------------------------------------------- 1 | """Tests for rejected.process""" 2 | import copy 3 | import signal 4 | try: 5 | from unittest import mock 6 | except ImportError: 7 | import mock 8 | 9 | from helper import config as helper_config 10 | from rejected import __version__, consumer, data, process 11 | from tornado import locks, testing 12 | 13 | from . import mocks, test_state 14 | 15 | 16 | class TestProcess(testing.AsyncTestCase, test_state.TestState): 17 | 18 | config = { 19 | 'stats': { 20 | 'influxdb': { 21 | 'enabled': False 22 | }, 23 | 'statsd': { 24 | 'enabled': False 25 | } 26 | }, 27 | 'Connections': { 28 | 'MockConnection': { 29 | 'host': 'localhost', 30 | 'port': 5672, 31 | 'user': 'guest', 32 | 'pass': 'guest', 33 | 'vhost': '/' 34 | }, 35 | 'MockRemoteConnection': { 36 | 'host': 'remotehost', 37 | 'port': 5672, 38 | 'user': 'guest', 39 | 'pass': 'guest', 40 | 'vhost': '/' 41 | }, 42 | 'MockRemoteSSLConnection': { 43 | 'host': 'remotehost', 44 | 'port': 5672, 45 | 'user': 'guest', 46 | 'pass': 'guest', 47 | 'vhost': '/', 48 | 'ssl_options': { 49 | 'prototcol': 2, 50 | } 51 | } 52 | }, 53 | 'Consumers': { 54 | 'MockConsumer': { 55 | 'consumer': 'tests.mocks.MockConsumer', 56 | 'connections': ['MockConnection'], 57 | 'config': {'test_value': True, 58 | 'num_value': 100}, 59 | 'min': 2, 60 | 'max': 5, 61 | 'max_errors': 10, 62 | 'qos_prefetch': 5, 63 | 'ack': True, 64 | 'queue': 'mock_queue' 65 | }, 66 | 'MockConsumer2': { 67 | 'consumer': 
'mock_consumer.MockConsumer', 68 | 'connections': ['MockConnection', 'MockRemoteConnection'], 69 | 'config': {'num_value': 50}, 70 | 'min': 1, 71 | 'max': 2, 72 | 'queue': 'mock_you' 73 | }, 74 | 'MockConsumer3': { 75 | 'consumer': 'mock_consumer.MockConsumer', 76 | 'connections': ['MockRemoteSSLConnection'], 77 | 'config': {'num_value': 50}, 78 | 'min': 1, 79 | 'max': 2, 80 | 'queue': 'mock_you2' 81 | } 82 | } 83 | } 84 | logging_config = helper_config.LoggingConfig(helper_config.Config.LOGGING) 85 | 86 | mock_args = { 87 | 'config': config, 88 | 'consumer_name': 'MockConsumer', 89 | 'stats_queue': 'StatsQueue', 90 | 'logging_config': helper_config.Config.LOGGING 91 | } 92 | 93 | def setUp(self): 94 | super(TestProcess, self).setUp() 95 | self._obj = self.new_process() 96 | 97 | def tearDown(self): 98 | del self._obj 99 | 100 | def new_kwargs(self, kwargs): 101 | return copy.deepcopy(kwargs) 102 | 103 | def new_process(self, kwargs=None): 104 | with mock.patch('multiprocessing.Process'): 105 | return process.Process( 106 | group=None, 107 | name='MockProcess', 108 | kwargs=kwargs or self.new_kwargs(self.mock_args)) 109 | 110 | def test_app_id(self): 111 | expectation = 'rejected/%s' % __version__ 112 | self.assertEqual(self._obj.AMQP_APP_ID, expectation) 113 | 114 | def test_startup_state(self): 115 | new_process = self.new_process() 116 | self.assertEqual(new_process.state, process.Process.STATE_INITIALIZING) 117 | 118 | def test_startup_time(self): 119 | mock_time = 123456789.012345 120 | with mock.patch('time.time', return_value=mock_time): 121 | new_process = self.new_process() 122 | self.assertEqual(new_process.state_start, mock_time) 123 | 124 | def test_startup_consumer_is_none(self): 125 | new_process = self.new_process() 126 | self.assertIsNone(new_process.consumer) 127 | 128 | def test_get_config(self): 129 | conn = 'MockConnection' 130 | name = 'MockConsumer' 131 | number = 5 132 | pid = 1234 133 | expectation = { 134 | 'connection': 
self.config['Connections'][conn], 135 | 'consumer_name': name, 136 | 'process_name': '%s_%i_tag_%i' % (name, pid, number) 137 | } 138 | with mock.patch('os.getpid', return_value=pid): 139 | self.assertEqual(self._obj.get_config(self.config, number, name, 140 | conn), expectation) 141 | 142 | def test_get_consumer_with_invalid_consumer(self): 143 | cfg = self.config['Consumers']['MockConsumer2'] 144 | self.assertIsNone(self._obj.get_consumer(cfg)) 145 | 146 | def test_get_consumer_version_output(self): 147 | config = {'consumer': 'tests.mocks.MockConsumer'} 148 | with mock.patch('logging.Logger.info') as info: 149 | self._obj.get_consumer(config) 150 | info.assert_called_with('Creating consumer %s v%s', 151 | config['consumer'], 152 | mocks.__version__) 153 | 154 | @mock.patch.object(consumer.Consumer, '__init__', side_effect=ImportError) 155 | def test_get_consumer_with_config_is_none(self, mock_method): 156 | config = { 157 | 'consumer': 'rejected.consumer.Consumer', 158 | 'config': {'field': 'value', 159 | 'true': True} 160 | } 161 | new_process = self.new_process() 162 | new_process.get_consumer(config) 163 | self.assertIsNone(new_process.get_consumer(config)) 164 | 165 | @mock.patch.object(consumer.Consumer, '__init__', side_effect=ImportError) 166 | def test_get_consumer_with_no_config_is_none(self, mock_method): 167 | config = {'consumer': 'rejected.consumer.Consumer'} 168 | new_process = self.new_process() 169 | self.assertIsNone(new_process.get_consumer(config)) 170 | 171 | def test_setup_signal_handlers(self): 172 | signals = [mock.call(signal.SIGPROF, self._obj.on_sigprof), 173 | mock.call(signal.SIGABRT, self._obj.stop)] 174 | with mock.patch('signal.signal') as signal_signal: 175 | self._obj.setup_sighandlers() 176 | signal_signal.assert_has_calls(signals, any_order=True) 177 | 178 | def mock_setup(self, new_process=None, side_effect=None): 179 | with mock.patch('signal.signal', side_effect=side_effect): 180 | with 
mock.patch('rejected.utils.import_consumer', 181 | return_value=(mock.Mock, None)): 182 | if not new_process: 183 | new_process = self.new_process(self.mock_args) 184 | new_process.setup() 185 | 186 | new_process.measurement = mock.Mock() 187 | return new_process 188 | 189 | def test_setup_stats_queue(self): 190 | mock_process = self.mock_setup() 191 | self.assertEqual(mock_process.stats_queue, 192 | self.mock_args['stats_queue']) 193 | 194 | def test_setup_consumer_name(self): 195 | mock_process = self.mock_setup() 196 | self.assertEqual(mock_process.stats_queue, 197 | self.mock_args['stats_queue']) 198 | 199 | def test_setup_config(self): 200 | mock_process = self.mock_setup() 201 | config = self.config['Consumers']['MockConsumer'] 202 | self.assertEqual(mock_process.consumer_config, config) 203 | 204 | def test_setup_config_queue_name(self): 205 | mock_process = self.mock_setup() 206 | self.assertEqual(mock_process.queue_name, 207 | self.config['Consumers']['MockConsumer']['queue']) 208 | 209 | def test_setup_config_no_ack(self): 210 | mock_process = self.mock_setup() 211 | self.assertEqual(mock_process.no_ack, 212 | not self.config['Consumers']['MockConsumer']['ack']) 213 | 214 | def test_setup_max_error_count(self): 215 | mock_process = self.mock_setup() 216 | self.assertEqual( 217 | mock_process.max_error_count, 218 | self.config['Consumers']['MockConsumer']['max_errors']) 219 | 220 | def test_setup_prefetch_count_no_config(self): 221 | args = copy.deepcopy(self.mock_args) 222 | del args['config']['Consumers']['MockConsumer']['qos_prefetch'] 223 | mock_process = self.new_process(args) 224 | mock_process.setup() 225 | self.assertEqual(mock_process.qos_prefetch, 226 | process.Process.QOS_PREFETCH_COUNT) 227 | 228 | def test_setup_prefetch_count_with_config(self): 229 | mock_process = self.mock_setup() 230 | self.assertEqual( 231 | mock_process.qos_prefetch, 232 | self.config['Consumers']['MockConsumer']['qos_prefetch']) 233 | 234 | def 
test_setup_with_ssl_connection(self): 235 | self.mock_args['consumer_name'] = 'MockConsumer3' 236 | mock_process = self.mock_setup() 237 | 238 | conn = mock_process.connections['MockRemoteSSLConnection'].connection 239 | self.assertTrue(bool(conn.params.ssl_options)) 240 | 241 | def test_setup_with_non_ssl_connection(self): 242 | self.mock_args['consumer_name'] = 'MockConsumer2' 243 | mock_process = self.mock_setup() 244 | 245 | conn = mock_process.connections['MockRemoteConnection'].connection 246 | self.assertFalse(bool(conn.params.ssl_options)) 247 | 248 | def test_is_idle_state_processing(self): 249 | self._obj.state = self._obj.STATE_PROCESSING 250 | self.assertFalse(self._obj.is_idle) 251 | 252 | def test_is_running_state_processing(self): 253 | self._obj.state = self._obj.STATE_PROCESSING 254 | self.assertTrue(self._obj.is_running) 255 | 256 | def test_is_shutting_down_state_processing(self): 257 | self._obj.state = self._obj.STATE_PROCESSING 258 | self.assertFalse(self._obj.is_shutting_down) 259 | 260 | def test_is_stopped_state_processing(self): 261 | self._obj.state = self._obj.STATE_PROCESSING 262 | self.assertFalse(self._obj.is_stopped) 263 | 264 | def test_state_processing_desc(self): 265 | self._obj.state = self._obj.STATE_PROCESSING 266 | self.assertEqual(self._obj.state_description, 267 | self._obj.STATES[self._obj.STATE_PROCESSING]) 268 | 269 | @testing.gen_test 270 | def test_invoke_consumer_when_amqp_conn_is_connected(self): 271 | mock_process = self.mock_setup() 272 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE] = 5 273 | 274 | # force unhandled exception in "execute" 275 | mock_process.consumer.execute.side_effect = Exception('blow up!') 276 | 277 | # mimic running process 278 | mock_process.consumer_lock = locks.Lock() 279 | mock_process.state = mock_process.STATE_IDLE 280 | 281 | # configure mock conn 282 | mock_conn = mock.Mock(spec=process.Connection) 283 | mock_process.connections[mock_conn] = mock_conn 284 | mock_conn.is_running 
= True 285 | 286 | mocks.CHANNEL.basic_nack = mock.Mock() 287 | message = data.Message(mock_conn, mocks.CHANNEL, mocks.METHOD, 288 | mocks.PROPERTIES, mocks.BODY, False) 289 | 290 | yield mock_process.invoke_consumer(message) 291 | 292 | self.assertEqual(mock_conn.shutdown.call_count, 0) 293 | self.assertEqual(mocks.CHANNEL.basic_nack.call_count, 1) 294 | self.assertEqual( 295 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE], 5) 296 | 297 | @testing.gen_test 298 | def test_invoke_consumer_when_amqp_conn_is_not_connected(self): 299 | mock_process = self.mock_setup() 300 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE] = 5 301 | 302 | # force unhandled exception in "execute" 303 | mock_process.consumer.execute.side_effect = Exception('blow up!') 304 | 305 | # mimic running process 306 | mock_process.consumer_lock = locks.Lock() 307 | mock_process.state = mock_process.STATE_IDLE 308 | 309 | # configure mock conn 310 | mock_conn = mock.Mock(spec=process.Connection) 311 | mock_process.connections[mock_conn] = mock_conn 312 | mock_conn.is_running = False 313 | 314 | mocks.CHANNEL.basic_ack = mock.Mock() 315 | message = data.Message(mock_conn, mocks.CHANNEL, mocks.METHOD, 316 | mocks.PROPERTIES, mocks.BODY, False) 317 | 318 | yield mock_process.invoke_consumer(message) 319 | 320 | self.assertEqual(mock_conn.shutdown.call_count, 1) 321 | self.assertEqual(mocks.CHANNEL.basic_ack.call_count, 0) 322 | self.assertEqual( 323 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE], 6) 324 | 325 | def test_ack_message_when_amqp_conn_is_connected(self): 326 | mock_process = self.mock_setup() 327 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE] = 5 328 | 329 | # configure mock conn 330 | mock_conn = mock.Mock(spec=process.Connection) 331 | mock_process.connections[mock_conn] = mock_conn 332 | mock_conn.is_running = True 333 | 334 | mocks.CHANNEL.basic_ack = mock.Mock() 335 | message = data.Message(mock_conn, mocks.CHANNEL, mocks.METHOD, 336 | 
mocks.PROPERTIES, mocks.BODY, False) 337 | 338 | mock_process.ack_message(message) 339 | 340 | self.assertEqual(mock_conn.shutdown.call_count, 0) 341 | self.assertEqual(mocks.CHANNEL.basic_ack.call_count, 1) 342 | self.assertEqual( 343 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE], 5) 344 | 345 | def test_ack_message_when_amqp_conn_is_not_connected(self): 346 | mock_process = self.mock_setup() 347 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE] = 5 348 | 349 | # configure mock conn 350 | mock_conn = mock.Mock(spec=process.Connection) 351 | mock_process.connections[mock_conn] = mock_conn 352 | mock_conn.is_running = False 353 | 354 | mocks.CHANNEL.basic_ack = mock.Mock() 355 | message = data.Message(mock_conn, mocks.CHANNEL, mocks.METHOD, 356 | mocks.PROPERTIES, mocks.BODY, False) 357 | 358 | mock_process.ack_message(message) 359 | 360 | self.assertEqual(mock_conn.shutdown.call_count, 1) 361 | self.assertEqual(mocks.CHANNEL.basic_ack.call_count, 0) 362 | self.assertEqual( 363 | mock_process.counters[mock_process.CLOSED_ON_COMPLETE], 6) 364 | -------------------------------------------------------------------------------- /tests/test_state.py: -------------------------------------------------------------------------------- 1 | """Tests for the State Class""" 2 | import unittest 3 | try: 4 | from unittest import mock 5 | except ImportError: 6 | import mock 7 | 8 | from rejected import state 9 | 10 | 11 | class TestState(unittest.TestCase): 12 | 13 | def setUp(self): 14 | self._obj = state.State() 15 | 16 | def test_set_state_invalid_value(self): 17 | self.assertRaises(ValueError, self._obj.set_state, 9999) 18 | 19 | def test_set_state_expected_assignment(self): 20 | self.state = self._obj.STATE_IDLE 21 | self._obj.set_state(self._obj.STATE_CONNECTING) 22 | self.assertEqual(self._obj.state, self._obj.STATE_CONNECTING) 23 | 24 | def test_set_state_state_start(self): 25 | self.state = self._obj.STATE_IDLE 26 | value = 86400 27 | with 
mock.patch('time.time', return_value=value): 28 | self._obj.set_state(self._obj.STATE_CONNECTING) 29 | self.assertEqual(self._obj.state_start, value) 30 | 31 | def test_state_initializing_desc(self): 32 | self._obj.state = self._obj.STATE_INITIALIZING 33 | self.assertEqual(self._obj.state_description, 34 | self._obj.STATES[self._obj.STATE_INITIALIZING]) 35 | 36 | def test_state_connecting_desc(self): 37 | self._obj.state = self._obj.STATE_CONNECTING 38 | self.assertEqual(self._obj.state_description, 39 | self._obj.STATES[self._obj.STATE_CONNECTING]) 40 | 41 | def test_state_idle_desc(self): 42 | self._obj.state = self._obj.STATE_IDLE 43 | self.assertEqual(self._obj.state_description, 44 | self._obj.STATES[self._obj.STATE_IDLE]) 45 | 46 | def test_state_active_desc(self): 47 | self._obj.state = self._obj.STATE_ACTIVE 48 | self.assertEqual(self._obj.state_description, 49 | self._obj.STATES[self._obj.STATE_ACTIVE]) 50 | 51 | def test_state_stop_requested_desc(self): 52 | self._obj.state = self._obj.STATE_STOP_REQUESTED 53 | self.assertEqual(self._obj.state_description, 54 | self._obj.STATES[self._obj.STATE_STOP_REQUESTED]) 55 | 56 | def test_state_shutting_down_desc(self): 57 | self._obj.state = self._obj.STATE_SHUTTING_DOWN 58 | self.assertEqual(self._obj.state_description, 59 | self._obj.STATES[self._obj.STATE_SHUTTING_DOWN]) 60 | 61 | def test_state_stopped_desc(self): 62 | self._obj.state = self._obj.STATE_STOPPED 63 | self.assertEqual(self._obj.state_description, 64 | self._obj.STATES[self._obj.STATE_STOPPED]) 65 | 66 | def test_is_idle_state_initializing(self): 67 | self._obj.state = self._obj.STATE_INITIALIZING 68 | self.assertFalse(self._obj.is_idle) 69 | 70 | def test_is_idle_state_connecting(self): 71 | self._obj.state = self._obj.STATE_CONNECTING 72 | self.assertFalse(self._obj.is_idle) 73 | 74 | def test_is_idle_state_idle(self): 75 | self._obj.state = self._obj.STATE_IDLE 76 | self.assertTrue(self._obj.is_idle) 77 | 78 | def 
test_is_idle_state_processing(self): 79 | self._obj.state = self._obj.STATE_ACTIVE 80 | self.assertFalse(self._obj.is_idle) 81 | 82 | def test_is_idle_state_stop_requested(self): 83 | self._obj.state = self._obj.STATE_STOP_REQUESTED 84 | self.assertFalse(self._obj.is_idle) 85 | 86 | def test_is_idle_state_shutting_down(self): 87 | self._obj.state = self._obj.STATE_SHUTTING_DOWN 88 | self.assertFalse(self._obj.is_idle) 89 | 90 | def test_is_idle_state_stopped(self): 91 | self._obj.state = self._obj.STATE_STOPPED 92 | self.assertFalse(self._obj.is_idle) 93 | 94 | def test_is_running_state_initializing(self): 95 | self._obj.state = self._obj.STATE_INITIALIZING 96 | self.assertFalse(self._obj.is_running) 97 | 98 | def test_is_running_state_connecting(self): 99 | self._obj.state = self._obj.STATE_CONNECTING 100 | self.assertFalse(self._obj.is_running) 101 | 102 | def test_is_running_state_idle(self): 103 | self._obj.state = self._obj.STATE_IDLE 104 | self.assertTrue(self._obj.is_running) 105 | 106 | def test_is_running_state_processing(self): 107 | self._obj.state = self._obj.STATE_ACTIVE 108 | self.assertTrue(self._obj.is_running) 109 | 110 | def test_is_running_state_stop_requested(self): 111 | self._obj.state = self._obj.STATE_STOP_REQUESTED 112 | self.assertFalse(self._obj.is_running) 113 | 114 | def test_is_running_state_shutting_down(self): 115 | self._obj.state = self._obj.STATE_SHUTTING_DOWN 116 | self.assertFalse(self._obj.is_running) 117 | 118 | def test_is_running_state_stopped(self): 119 | self._obj.state = self._obj.STATE_STOPPED 120 | self.assertFalse(self._obj.is_running) 121 | 122 | def test_is_shutting_down_state_initializing(self): 123 | self._obj.state = self._obj.STATE_INITIALIZING 124 | self.assertFalse(self._obj.is_shutting_down) 125 | 126 | def test_is_shutting_down_state_connecting(self): 127 | self._obj.state = self._obj.STATE_CONNECTING 128 | self.assertFalse(self._obj.is_shutting_down) 129 | 130 | def test_is_shutting_down_state_idle(self): 
131 | self._obj.state = self._obj.STATE_IDLE 132 | self.assertFalse(self._obj.is_shutting_down) 133 | 134 | def test_is_shutting_down_state_processing(self): 135 | self._obj.state = self._obj.STATE_ACTIVE 136 | self.assertFalse(self._obj.is_shutting_down) 137 | 138 | def test_is_shutting_down_state_stop_requested(self): 139 | self._obj.state = self._obj.STATE_STOP_REQUESTED 140 | self.assertFalse(self._obj.is_shutting_down) 141 | 142 | def test_is_shutting_down_state_shutting_down(self): 143 | self._obj.state = self._obj.STATE_SHUTTING_DOWN 144 | self.assertTrue(self._obj.is_shutting_down) 145 | 146 | def test_is_shutting_down_state_stopped(self): 147 | self._obj.state = self._obj.STATE_STOPPED 148 | self.assertFalse(self._obj.is_shutting_down) 149 | 150 | def test_is_stopped_state_initializing(self): 151 | self._obj.state = self._obj.STATE_INITIALIZING 152 | self.assertFalse(self._obj.is_stopped) 153 | 154 | def test_is_stopped_state_connecting(self): 155 | self._obj.state = self._obj.STATE_CONNECTING 156 | self.assertFalse(self._obj.is_stopped) 157 | 158 | def test_is_stopped_state_idle(self): 159 | self._obj.state = self._obj.STATE_IDLE 160 | self.assertFalse(self._obj.is_stopped) 161 | 162 | def test_is_stopped_state_processing(self): 163 | self._obj.state = self._obj.STATE_ACTIVE 164 | self.assertFalse(self._obj.is_stopped) 165 | 166 | def test_is_stopped_state_stop_requested(self): 167 | self._obj.state = self._obj.STATE_STOP_REQUESTED 168 | self.assertFalse(self._obj.is_stopped) 169 | 170 | def test_is_stopped_state_shutting_down(self): 171 | self._obj.state = self._obj.STATE_SHUTTING_DOWN 172 | self.assertFalse(self._obj.is_stopped) 173 | -------------------------------------------------------------------------------- /tests/test_statsd.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import unittest 3 | import uuid 4 | try: 5 | from unittest import mock 6 | except ImportError: 7 | import mock 8 | 9 
| from rejected import statsd 10 | from tornado import gen, iostream, locks, tcpserver, testing 11 | 12 | 13 | class TestCase(unittest.TestCase): 14 | 15 | def setUp(self): 16 | self.failure_callback = mock.Mock() 17 | self.name = str(uuid.uuid4()) 18 | self.settings = self.get_settings() 19 | self.statsd = statsd.Client( 20 | self.name, self.settings, self.failure_callback) 21 | 22 | @staticmethod 23 | def get_settings(): 24 | return { 25 | 'host': '10.1.1.1', 26 | 'port': 8124, 27 | 'prefix': str(uuid.uuid4()), 28 | 'tcp': False 29 | } 30 | 31 | def payload_format(self, key, value, metric_type): 32 | return self.statsd._build_payload( 33 | key, value, metric_type).encode('utf-8') 34 | 35 | 36 | class UDPTestCase(TestCase): 37 | 38 | def test_address(self): 39 | self.assertEqual(self.statsd._address, 40 | (self.settings['host'], self.settings['port'])) 41 | 42 | def test_consumer_name(self): 43 | self.assertEqual(self.statsd._consumer_name, self.name) 44 | 45 | def test_prefix(self): 46 | self.assertEqual(self.statsd._prefix, self.settings['prefix']) 47 | 48 | def test_settings(self): 49 | for key in self.settings: 50 | self.assertEqual(self.statsd._setting(key, None), 51 | self.settings[key]) 52 | 53 | 54 | class UDPSendTestCase(TestCase): 55 | 56 | def setUp(self): 57 | super(UDPSendTestCase, self).setUp() 58 | self.socket = mock.Mock() 59 | self.statsd._udp_sock = self.socket 60 | 61 | def test_hostname_in_metric(self): 62 | self.statsd.add_timing('foo', 2.5) 63 | value = self.payload_format('foo', 2500.0, 'ms') 64 | self.assertIn(socket.gethostname().split('.')[0].encode('utf-8'), 65 | value) 66 | 67 | def test_add_timing(self): 68 | self.statsd.add_timing('foo', 2.5) 69 | expectation = self.payload_format('foo', 2500.0, 'ms') 70 | self.socket.sendto.assert_called_once_with(expectation, 71 | self.statsd._address) 72 | 73 | def test_incr(self): 74 | self.statsd.incr('bar', 2) 75 | expectation = self.payload_format('bar', 2, 'c') 76 | 
self.socket.sendto.assert_called_once_with(expectation, 77 | self.statsd._address) 78 | 79 | def test_set_gauge(self): 80 | self.statsd.set_gauge('baz', 98.5) 81 | expectation = self.payload_format('baz', 98.5, 'g') 82 | self.socket.sendto.assert_called_once_with(expectation, 83 | self.statsd._address) 84 | 85 | 86 | class NoHostnameTestCase(TestCase): 87 | 88 | @staticmethod 89 | def get_settings(): 90 | return { 91 | 'host': '10.1.1.1', 92 | 'port': 8124, 93 | 'prefix': str(uuid.uuid4()), 94 | 'include_hostname': False 95 | } 96 | 97 | def test_hostname_in_metric(self): 98 | self.statsd.add_timing('foo', 2.5) 99 | value = self.payload_format('foo', 2500.0, 'ms') 100 | self.assertNotIn(socket.gethostname().split('.')[0].encode('utf-8'), 101 | value) 102 | 103 | 104 | class StatsdServer(tcpserver.TCPServer): 105 | 106 | PATTERN = br'[a-z0-9._-]+:[0-9.]+\|(?:g|c|ms)\n' 107 | 108 | def __init__(self, ssl_options=None, max_buffer_size=None, 109 | read_chunk_size=None): 110 | self.event = locks.Event() 111 | self.packets = [] 112 | self.reconnect_receive = False 113 | super(StatsdServer, self).__init__( 114 | ssl_options, max_buffer_size, read_chunk_size) 115 | 116 | @gen.coroutine 117 | def handle_stream(self, stream, address): 118 | print('Connected', address) 119 | while True: 120 | try: 121 | result = yield stream.read_until_regex(self.PATTERN) 122 | except iostream.StreamClosedError: 123 | break 124 | else: 125 | self.event.set() 126 | print('Received %r' % result) 127 | self.packets.append(result) 128 | if b'reconnect' in result: 129 | self.reconnect_receive = True 130 | stream.close() 131 | return 132 | 133 | 134 | class TCPTestCase(testing.AsyncTestCase): 135 | 136 | def setUp(self): 137 | super(TCPTestCase, self).setUp() 138 | self.failure_callback = mock.Mock() 139 | self.sock, self.port = testing.bind_unused_port() 140 | self.name = str(uuid.uuid4()) 141 | self.settings = self.get_settings() 142 | print(self.settings) 143 | self.statsd = statsd.Client( 144 | 
self.name, self.settings, self.failure_callback) 145 | self.server = StatsdServer() 146 | self.server.add_socket(self.sock) 147 | 148 | def get_settings(self): 149 | return { 150 | 'host': self.sock.getsockname()[0], 151 | 'port': self.port, 152 | 'prefix': str(uuid.uuid4()), 153 | 'tcp': True 154 | } 155 | 156 | def payload_format(self, key, value, metric_type): 157 | return self.statsd._build_payload( 158 | key, value, metric_type).encode('utf-8') 159 | 160 | @testing.gen_test 161 | def test_add_timing(self): 162 | self.statsd.add_timing('foo', 2.5) 163 | yield self.server.event.wait() 164 | self.assertIn(self.payload_format('foo', 2500.0, 'ms'), 165 | self.server.packets) 166 | 167 | @testing.gen_test 168 | def test_incr(self): 169 | self.statsd.incr('bar', 2) 170 | yield self.server.event.wait() 171 | self.assertIn(self.payload_format('bar', 2, 'c'), self.server.packets) 172 | 173 | @testing.gen_test 174 | def test_set_gauge(self): 175 | self.statsd.set_gauge('baz', 98.5) 176 | yield self.server.event.wait() 177 | self.assertIn(self.payload_format('baz', 98.5, 'g'), 178 | self.server.packets) 179 | 180 | @testing.gen_test 181 | def test_reconnect(self): 182 | self.statsd.set_gauge('baz', 98.5) 183 | yield self.server.event.wait() 184 | self.server.event.clear() 185 | self.statsd.set_gauge('reconnect', 100) 186 | yield self.server.event.wait() 187 | self.server.event.clear() 188 | yield gen.sleep(2) 189 | self.assertTrue(self.server.reconnect_receive) 190 | self.statsd.set_gauge('bar', 10) 191 | yield self.server.event.wait() 192 | self.assertTrue(self.server.reconnect_receive) 193 | 194 | self.assertIn(self.payload_format('baz', 98.5, 'g'), 195 | self.server.packets) 196 | self.assertIn(self.payload_format('reconnect', 100, 'g'), 197 | self.server.packets) 198 | self.assertIn(self.payload_format('bar', 10, 'g'), 199 | self.server.packets) 200 | -------------------------------------------------------------------------------- /tests/test_testing.py: 
-------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | """Tests for rejected.testing""" 3 | from tornado import gen 4 | 5 | from rejected import consumer, testing 6 | 7 | 8 | class TestPublishedMessages(testing.AsyncTestCase): 9 | 10 | def get_consumer(self): 11 | class Consumer(consumer.SmartConsumer): 12 | 13 | @gen.coroutine 14 | def process(self): 15 | for i in range(10): 16 | self.publish_message( 17 | exchange='my_exchange', 18 | routing_key='my_routing_key', 19 | body=i, 20 | properties={ 21 | 'type': 'my_type', 22 | 'content_type': 'my_content_type' 23 | }) 24 | 25 | return Consumer 26 | 27 | @testing.gen_test 28 | def test_order_preserved(self): 29 | yield self.process_message() 30 | self.assertEqual(10, len(self.published_messages)) 31 | for i, published_message in zip(range(10), self.published_messages): 32 | self.assertEqual(i, published_message.body) 33 | self.assertEqual('my_exchange', published_message.exchange) 34 | self.assertEqual('my_routing_key', published_message.routing_key) 35 | self.assertEqual('my_type', 36 | published_message.properties.type) 37 | self.assertEqual('my_content_type', 38 | published_message.properties.content_type) 39 | 40 | 41 | class TestProcessingException(testing.AsyncTestCase): 42 | 43 | def get_consumer(self): 44 | class Consumer(consumer.SmartConsumer): 45 | 46 | @gen.coroutine 47 | def process(self): 48 | raise consumer.ProcessingException 49 | 50 | return Consumer 51 | 52 | @testing.gen_test 53 | def test_republished(self): 54 | with self.assertRaises(consumer.ProcessingException): 55 | yield self.process_message() 56 | self.assertEqual(1, len(self.published_messages)) 57 | published_message = self.published_messages[0] 58 | 59 | self.assertEqual( 60 | self.consumer._message.routing_key, 61 | published_message.routing_key) 62 | self.assertEqual( 63 | self.consumer._error_exchange, 64 | published_message.exchange) 65 | self.assertEqual( 66 | 
self.consumer._message.body, 67 | published_message.body) 68 | for (attr, value) in self.consumer._message.properties: 69 | if attr == 'headers': 70 | self.assertEqual( 71 | {'X-Original-Exchange': 'rejected', 72 | 'X-Original-Queue': self.process.queue_name, 73 | 'X-Processing-Exception': 'ProcessingException', 74 | 'X-Processing-Exceptions': 1}, 75 | published_message.properties.headers) 76 | else: 77 | self.assertEqual( 78 | value, getattr(published_message.properties, attr)) 79 | 80 | 81 | class TestMessageException(testing.AsyncTestCase): 82 | 83 | def get_consumer(self): 84 | class Consumer(consumer.SmartConsumer): 85 | MESSAGE_TYPE = 'a_type' 86 | return Consumer 87 | 88 | @testing.gen_test 89 | def test_no_drop(self): 90 | with self.assertRaises(consumer.MessageException): 91 | yield self.process_message() 92 | self.assertEqual(0, len(self.published_messages)) 93 | 94 | @testing.gen_test 95 | def test_drop(self): 96 | self.consumer._drop_exchange = 'drop' 97 | self.consumer._drop_invalid = True 98 | yield self.process_message(message_type='bad_type') 99 | self.assertEqual(1, len(self.published_messages)) 100 | published_message = self.published_messages[0] 101 | 102 | self.assertEqual( 103 | self.consumer._message.routing_key, 104 | published_message.routing_key) 105 | self.assertEqual( 106 | self.consumer._drop_exchange, 107 | published_message.exchange) 108 | self.assertEqual( 109 | self.consumer._message.body, 110 | published_message.body) 111 | for (attr, value) in self.consumer._message.properties: 112 | if attr == 'headers': 113 | headers = published_message.properties.headers 114 | self.assertTrue(headers.pop('X-Dropped-Timestamp')) 115 | self.assertEqual( 116 | {'X-Dropped-By': 'Consumer', 117 | 'X-Dropped-Reason': 'invalid type', 118 | 'X-Original-Exchange': 'rejected', 119 | 'X-Original-Queue': self.process.queue_name}, 120 | headers) 121 | else: 122 | self.assertEqual( 123 | value, getattr(published_message.properties, attr)) 124 | 125 | 126 | 
class TestUnhandledException(testing.AsyncTestCase):
    """An exception the consumer does not handle must propagate out of
    process_message() so the test author sees the original stack trace."""

    def get_consumer(self):
        class Consumer(consumer.Consumer):

            @gen.coroutine
            def process(self):
                raise ValueError('This is a test exception')

        return Consumer

    @testing.gen_test
    def test_stacktrace(self):
        with self.assertRaises(ValueError):
            yield self.process_message({'foo': 'bar'})


# -- tests/test_utils.py -----------------------------------------------------
import unittest

from rejected import utils


class TestImportNamspacedClass(unittest.TestCase):
    """Tests for rejected.utils.import_consumer."""
    # NOTE(review): the class name carries a historical typo ("Namspaced");
    # kept as-is so any external references to the test name do not break.

    def test_import_consumer(self):
        import logging
        result_class, _ = utils.import_consumer('logging.Logger')
        self.assertEqual(result_class, logging.Logger)

    def test_import_consumer_version(self):
        import logging
        # NOTE(review): logging.__version__ is an implementation detail of
        # the stdlib module and has been dropped in newer CPython releases;
        # confirm it exists across the supported Python range.
        _, result_version = utils.import_consumer('logging.Logger')
        self.assertEqual(result_version, logging.__version__)

    def test_import_consumer_no_version(self):
        # signal has no __version__ attribute, so None is expected.
        _, result_version = utils.import_consumer('signal.ItimerError')
        self.assertIsNone(result_version)

    def test_import_consumer_failure(self):
        # A non-existent module path must surface as ImportError.
        self.assertRaises(ImportError, utils.import_consumer,
                          'rejected.fake_module.Classname')


# -- utils/test_generator.py -------------------------------------------------
"""Generate test messages for the example consumer."""
import random
import time
import uuid

from pika import BasicProperties
from pika.adapters import BlockingConnection
from pika.connection import ConnectionParameters

# Number of messages published per run.
MESSAGE_COUNT = 100

# Payload templates, one per content type; %i is filled with a random int.
# NOTE(review): the markup in the HTML/XML templates appears to have been
# stripped by whatever extracted this file (the tags are gone) — restore the
# original tagged payloads from version control if possible.
HTML_VALUE = 'HiHello %i'
JSON_VALUE = '{"json_encoded": true, "value": "here", "random": %i}'
XML_VALUE = 'True' \
            'Bar%i'
YAML_VALUE = """%%YAML 1.2
---
Application:
  poll_interval: 10.0
  log_stats: True
  name: Example
  value: %i
"""

if __name__ == '__main__':
    connection = BlockingConnection(ConnectionParameters())

    # Open the channel
    channel = connection.channel()

    # Declare the example topic exchange and the two queues the example
    # consumer uses, binding each queue to its routing key.
    # NOTE(review): newer pika versions renamed the 'type' keyword to
    # 'exchange_type' — confirm against the pinned pika release.
    channel.exchange_declare(exchange='example', type='topic', durable=True)

    channel.queue_declare(queue='generated_messages', durable=True,
                          exclusive=False, auto_delete=False)
    channel.queue_bind(exchange='example', queue='generated_messages',
                       routing_key='rejected_example')

    channel.queue_declare(queue='consumer_replies', durable=True,
                          exclusive=False, auto_delete=False)
    channel.queue_bind(exchange='example', queue='consumer_replies',
                       routing_key='rejected_reply')

    # Publish MESSAGE_COUNT messages with a randomly chosen payload type.
    for iteration in range(MESSAGE_COUNT):
        # Bug fix: randint bounds are inclusive, so the original
        # randint(1, 4) made the plain-text fallback branch unreachable;
        # five payload types were clearly intended.
        msg_type = random.randint(1, 5)
        if msg_type == 1:
            body = HTML_VALUE % random.randint(1, 32768)
            content_type = 'text/html'
        elif msg_type == 2:
            body = JSON_VALUE % random.randint(1, 32768)
            content_type = 'application/json'
        elif msg_type == 3:
            body = XML_VALUE % random.randint(1, 32768)
            content_type = 'text/xml'
        elif msg_type == 4:
            body = YAML_VALUE % random.randint(1, 32768)
            content_type = 'text/x-yaml'
        else:
            body = 'Plain text value %i' % random.randint(1, 32768)
            # NOTE(review): 'text/text' is not a registered MIME type
            # ('text/plain' is standard); kept because the example
            # consumer may match on this exact value.
            content_type = 'text/text'

        properties = BasicProperties(timestamp=int(time.time()),
                                     app_id=__file__,
                                     user_id='guest',
                                     content_type=content_type,
                                     message_id=str(uuid.uuid4()),
                                     type='Example message',
                                     reply_to='rejected_reply',
                                     delivery_mode=1)

        # Send the message
        channel.basic_publish(
            exchange='example', routing_key='rejected_example',
            body=body, properties=properties)

    connection.close()