├── test_app
│   ├── __init__.py
│   ├── dj111
│   │   ├── __init__.py
│   │   ├── urls.py
│   │   └── settings.py
│   ├── dj22
│   │   ├── __init__.py
│   │   ├── urls.py
│   │   └── settings.py
│   ├── models.py
│   ├── factories.py
│   └── settings.py
├── event_consumer
│   ├── conf
│   │   ├── __init__.py
│   │   └── settings.py
│   ├── test_utils
│   │   ├── __init__.py
│   │   └── handlers.py
│   ├── __init__.py
│   ├── __about__.py
│   ├── types.py
│   ├── errors.py
│   └── handlers.py
├── requirements-base.txt
├── requirements-dev.txt
├── requirements.txt
├── requirements-test.txt
├── docker-compose.yml
├── tests
│   ├── __init__.py
│   ├── test_retry_handler.py
│   ├── base.py
│   ├── test_retry_handler_django.py
│   ├── test_consume_handler.py
│   └── test_consumer_step.py
├── Makefile
├── README_DEPOP_DEV.md
├── setup.py
├── .gitignore
├── README.rst
└── LICENSE

/test_app/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test_app/dj111/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test_app/dj22/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/event_consumer/conf/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/event_consumer/test_utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/requirements-base.txt:
--------------------------------------------------------------------------------
1 | six>=1.16.0,<2.0.0
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | -r requirements-test.txt
2 |
3 | ipython
4 | ipdb
5 | twine
--------------------------------------------------------------------------------
/event_consumer/__init__.py:
--------------------------------------------------------------------------------
1 | from event_consumer.handlers import message_handler  # noqa
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | -r requirements-base.txt
2 |
3 | celery>=5.2.7,<6.0.0
4 | kombu>=5.2.4,<6.0.0
--------------------------------------------------------------------------------
/event_consumer/__about__.py:
--------------------------------------------------------------------------------
1 | __version__ = '2.0.1'
2 |
3 |
4 | if __name__ == '__main__':
5 |     print(__version__)
--------------------------------------------------------------------------------
/requirements-test.txt:
--------------------------------------------------------------------------------
1 | -r requirements.txt
2 |
3 | pytest
4 | mock
5 | flaky
6 | pytest-django
7 | factory-boy>=3.2.1,<4.0.0
8 | Django>=4.1.7,<5.0.0
--------------------------------------------------------------------------------
/test_app/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 |
4 | class User(models.Model):
5 |     username = models.CharField(max_length=24, unique=True)
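    # NB: `unique=True` is what lets tests/test_retry_handler_django.py provoke
    # an IntegrityError by saving a duplicate username.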
6 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 |   rabbitmq:
6 |     image: rabbitmq:3.6-management
7 |     ports:
8 |       - "5672:5672"
9 |       - "15672:15672"
10 |
--------------------------------------------------------------------------------
/test_app/factories.py:
--------------------------------------------------------------------------------
1 | import factory
2 |
3 |
4 | class UserFactory(factory.django.DjangoModelFactory):
5 |     class Meta:
6 |         model = 'test_app.User'
7 |
8 |     username = factory.Sequence(lambda n: 'user_%s' % n)
9 |
--------------------------------------------------------------------------------
/test_app/dj111/urls.py:
--------------------------------------------------------------------------------
1 | # NB: empty urlconf - `django.conf.urls.patterns()` was removed in Django 1.10
2 |
3 | urlpatterns = [
4 |     # Examples:
5 |     # path('', views.home, name='home'),
6 |     # path('blog/', include('blog.urls')),
7 |     # (requires: from django.urls import include, path)
8 | ]
9 |
--------------------------------------------------------------------------------
/test_app/dj22/urls.py:
--------------------------------------------------------------------------------
1 | # NB: empty urlconf - `django.conf.urls.patterns()` was removed in Django 1.10
2 |
3 | urlpatterns = [
4 |     # Examples:
5 |     # path('', views.home, name='home'),
6 |     # path('blog/', include('blog.urls')),
7 |     # (requires: from django.urls import include, path)
8 | ]
9 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | import six
2 |
3 |
4 | six.add_move(
5 |     six.MovedAttribute(
6 |         name='string_letters',
7 |         old_mod='string',
8 |         new_mod='string',
9 |         old_attr='letters',
10 |         new_attr='ascii_letters',
11 |     )
12 | )
13 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: pypi tag test
2 |
3 | pypi:
4 | 	rm -f dist/*
5 | 	python setup.py sdist
6 | 	twine upload --config-file=.pypirc dist/*
7 | 	make tag
8 |
9 | tag:
10 | 	git tag $$(python event_consumer/__about__.py)
11 | 	git push --tags
12 |
13 | test:
14 | 	PYTHONPATH=. 
py.test -v -s tests/
15 |
--------------------------------------------------------------------------------
/test_app/settings.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
4 | EVENT_CONSUMER_BACKOFF_FUNC = lambda count: 0.5  # noqa
5 |
6 | EVENT_CONSUMER_BROKER_URL = 'amqp://{0}:5672'.format(
7 |     os.getenv('BROKER_HOST', 'localhost')
8 | )
9 |
10 | EVENT_CONSUMER_EXCHANGES = {
11 |     'custom': {
12 |         'name': 'custom',
13 |         'type': 'topic',  # required for wildcard routing_keys
14 |     }
15 | }
16 |
17 | EVENT_CONSUMER_WHATEVER = 'WTF'
18 |
--------------------------------------------------------------------------------
/event_consumer/types.py:
--------------------------------------------------------------------------------
1 | from typing import NamedTuple, Dict, Callable
2 |
3 |
4 | # Used by handlers.REGISTRY as keys
5 | QueueKey = NamedTuple(
6 |     'QueueKey',
7 |     [
8 |         ('queue', str),
9 |         ('exchange', str),
10 |     ],
11 | )
12 |
13 |
14 | HandlerRegistration = NamedTuple(
15 |     'HandlerRegistration',
16 |     [
17 |         ('routing_key', str),
18 |         ('queue_arguments', Dict[str, str]),
19 |         ('handler', Callable),
20 |     ],
21 | )
22 |
--------------------------------------------------------------------------------
/README_DEPOP_DEV.md:
--------------------------------------------------------------------------------
1 | ## Info for Depop devs
2 |
3 | To release a new version of this to PyPI you should use the `depop` PyPI user account.
4 |
5 | If you already have your own personal PyPI account you probably have that saved in your `~/.pypirc` file.
6 |
7 | In order to release to PyPI under a different user you need to:
8 |
9 | 1. `pip install twine`
10 | 2. create a `.pypirc` file in the project root containing the `depop` username and password
11 | 3. `make pypi`
12 |
13 | #### Note:
14 |
15 | Since recent changes to PyPI you need to have the following section in your `.pypirc` file for twine upload to work:
16 |
17 | ```ini
18 | [pypi]
19 | repository: https://upload.pypi.org/legacy/
20 | ```
21 |
--------------------------------------------------------------------------------
/event_consumer/errors.py:
--------------------------------------------------------------------------------
1 | from six import text_type
2 |
3 |
4 | class PermanentFailure(Exception):
5 |     """
6 |     Raising `PermanentFailure` in a handler function causes the Kombu message
7 |     to be archived and not retried.
8 |
9 |     Do this when it's obvious a retry will not fix a problem (bad message
10 |     format, programmer error, etc).
11 |     """
12 |
13 |     @property
14 |     def message(self):
15 |         try:
16 |             return text_type(self.args[0])
17 |         except IndexError:
18 |             return ''
19 |
20 |     def __str__(self):
21 |         return "<{}>: {}".format(self.__class__.__name__, self.message)
22 |
23 |
24 | class NoExchange(Exception):
25 |     """
26 |     Raised if during the instantiation of an AMQPRetryHandler the requested
27 |     exchange has not been defined in settings.EXCHANGES
28 |     """
29 |
30 |
31 | class InvalidQueueRegistration(Exception):
32 |     """
33 |     Raised if you try to connect a message handler with a combination of
34 |     queue and exchange that cannot work.
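    For example: binding a custom queue name, or using QUEUE_NAME_PREFIX,
    requires a named exchange from settings.EXCHANGES.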
35 | """ 36 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | from codecs import open # To use a consistent encoding 3 | from os import path 4 | 5 | here = path.abspath(path.dirname(__file__)) 6 | 7 | # Get the long description from the README file 8 | with open(path.join(here, 'README.rst'), encoding='utf-8') as f: 9 | long_description = f.read() 10 | 11 | # Get content from __about__.py 12 | about = {} 13 | with open(path.join(here, 'event_consumer', '__about__.py'), 'r', 'utf-8') as f: 14 | exec(f.read(), about) 15 | 16 | 17 | setup( 18 | name='celery-message-consumer', 19 | # Versions should comply with PEP440. For a discussion on single-sourcing 20 | # the version across setup.py and the project code, see 21 | # https://packaging.python.org/en/latest/single_source_version.html 22 | version=about['__version__'], 23 | description='Tool for using the bin/celery worker to consume vanilla AMQP messages (i.e. not Celery tasks)', 24 | long_description=long_description, 25 | url='https://github.com/depop/celery-message-consumer', 26 | author='Depop', 27 | author_email='dev@depop.com', 28 | license='Apache 2.0', 29 | classifiers=[ 30 | 'Environment :: Web Environment', 31 | 'Intended Audience :: Developers', 32 | 'License :: OSI Approved :: Apache Software License', 33 | 'Operating System :: OS Independent', 34 | 'Programming Language :: Python', 35 | 'Programming Language :: Python :: 3.6', 36 | ], 37 | install_requires=[ 38 | 'six==1.16.0', 39 | ], 40 | packages=[ 41 | 'event_consumer', 42 | 'event_consumer.conf', 43 | 'event_consumer.test_utils', 44 | ], 45 | ) 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | .pypirc 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # SageMath parsed files 81 | *.sage.py 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | .venv 88 | venv/ 89 | ENV/ 90 | .tox/ 91 | 92 | # Spyder project settings 93 | .spyderproject 94 | .spyproject 95 | 96 | # Rope project settings 97 | .ropeproject 98 | 99 | # mkdocs documentation 100 | /site 101 | 102 | # mypy 103 | .mypy_cache/ 104 | -------------------------------------------------------------------------------- /event_consumer/test_utils/handlers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from event_consumer.conf import settings 4 | from event_consumer.errors import PermanentFailure 5 | from event_consumer.handlers import message_handler 6 | 7 | 8 | _logger = logging.getLogger(__name__) 9 | 10 | 11 | class IntegrationTestHandlers(object): 12 | """ 13 | Basic message handlers that log or raise known exceptions to allow 14 | interactive testing of the RabbitMQ config. 15 | """ 16 | 17 | @staticmethod 18 | def py_integration_ok(body): 19 | """ 20 | Should always succeed, never retry, never archive. 21 | """ 22 | msg = 'py_integration_ok, {}'.format(body) 23 | _logger.info(msg) 24 | 25 | @staticmethod 26 | def py_integration_raise(body): 27 | """ 28 | Should retry until there are no attempts left, then archive. 29 | """ 30 | msg = 'py_integration_raise, {}'.format(body) 31 | _logger.info(msg) 32 | raise Exception(msg) 33 | 34 | @staticmethod 35 | def py_integration_raise_permanent(body): 36 | """ 37 | Should cause the message to be archived on first go. 38 | """ 39 | msg = 'py_integration_raise_permanent, {}'.format(body) 40 | _logger.info(msg) 41 | raise PermanentFailure(msg) 42 | 43 | 44 | if settings.TEST_ENABLED: 45 | # Add tasks for interactive testing (call decorators directly) 46 | message_handler('py.integration.ok')(IntegrationTestHandlers.py_integration_ok) 47 | message_handler('py.integration.raise')( 48 | IntegrationTestHandlers.py_integration_raise 49 | ) 50 | message_handler('py.integration.raise.permanent')( 51 | IntegrationTestHandlers.py_integration_raise_permanent 52 | ) 53 | -------------------------------------------------------------------------------- /test_app/dj111/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for dj17_testproject project. 3 | 4 | For more information on this file, see 5 | https://docs.djangoproject.com/en/dev/topics/settings/ 6 | 7 | For the full list of settings and their values, see 8 | https://docs.djangoproject.com/en/dev/ref/settings/ 9 | """ 10 | 11 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
12 | import os 13 | 14 | BASE_DIR = os.path.dirname(__file__) 15 | 16 | MANAGE_PY_PATH = os.path.join(BASE_DIR, 'manage.py') 17 | 18 | 19 | # Quick-start development settings - unsuitable for production 20 | # See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/ 21 | 22 | # SECURITY WARNING: keep the secret key used in production secret! 23 | SECRET_KEY = '#+*qjewrm$8o_)uxz6boz+gqp*pztxa84_41$fd8xbi!72$&2)' 24 | 25 | # SECURITY WARNING: don't run with debug turned on in production! 26 | DEBUG = True 27 | 28 | TEMPLATE_DEBUG = True 29 | 30 | ALLOWED_HOSTS = [] 31 | 32 | 33 | # Application definition 34 | 35 | INSTALLED_APPS = ( 36 | 'django.contrib.admin', 37 | 'django.contrib.auth', 38 | 'django.contrib.contenttypes', 39 | 'django.contrib.sessions', 40 | 'django.contrib.messages', 41 | 'django.contrib.staticfiles', 42 | 'test_app', 43 | ) 44 | 45 | MIDDLEWARE_CLASSES = ( 46 | 'django.contrib.sessions.middleware.SessionMiddleware', 47 | 'django.middleware.common.CommonMiddleware', 48 | 'django.middleware.csrf.CsrfViewMiddleware', 49 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 50 | 'django.contrib.messages.middleware.MessageMiddleware', 51 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 52 | ) 53 | 54 | ROOT_URLCONF = 'test_app.dj111.urls' 55 | 56 | 57 | # Database 58 | # https://docs.djangoproject.com/en/dev/ref/settings/#databases 59 | 60 | DATABASES = { 61 | 'default': { 62 | 'ENGINE': 'django.db.backends.sqlite3', 63 | 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), 64 | 'TEST': { 65 | 'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'), 66 | }, 67 | } 68 | } 69 | 70 | # Internationalization 71 | # https://docs.djangoproject.com/en/dev/topics/i18n/ 72 | 73 | LANGUAGE_CODE = 'en-us' 74 | 75 | TIME_ZONE = 'UTC' 76 | 77 | USE_I18N = True 78 | 79 | USE_L10N = True 80 | 81 | USE_TZ = True 82 | 83 | 84 | # Static files (CSS, JavaScript, Images) 85 | # https://docs.djangoproject.com/en/dev/howto/static-files/ 86 | 87 | STATIC_URL = '/static/' 88 | -------------------------------------------------------------------------------- /test_app/dj22/settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django settings for dj17_testproject project. 3 | 4 | For more information on this file, see 5 | https://docs.djangoproject.com/en/dev/topics/settings/ 6 | 7 | For the full list of settings and their values, see 8 | https://docs.djangoproject.com/en/dev/ref/settings/ 9 | """ 10 | 11 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 12 | import os 13 | 14 | BASE_DIR = os.path.dirname(__file__) 15 | 16 | MANAGE_PY_PATH = os.path.join(BASE_DIR, 'manage.py') 17 | 18 | 19 | # Quick-start development settings - unsuitable for production 20 | # See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/ 21 | 22 | # SECURITY WARNING: keep the secret key used in production secret! 23 | SECRET_KEY = '#+*qjewrm$8o_)uxz6boz+gqp*pztxa84_41$fd8xbi!72$&2)' 24 | 25 | # SECURITY WARNING: don't run with debug turned on in production! 
26 | DEBUG = True 27 | 28 | TEMPLATE_DEBUG = True 29 | 30 | ALLOWED_HOSTS = [] 31 | 32 | 33 | # Application definition 34 | 35 | INSTALLED_APPS = ( 36 | 'django.contrib.admin', 37 | 'django.contrib.auth', 38 | 'django.contrib.contenttypes', 39 | 'django.contrib.sessions', 40 | 'django.contrib.messages', 41 | 'django.contrib.staticfiles', 42 | 'test_app', 43 | ) 44 | 45 | MIDDLEWARE_CLASSES = ( 46 | 'django.contrib.sessions.middleware.SessionMiddleware', 47 | 'django.middleware.common.CommonMiddleware', 48 | 'django.middleware.csrf.CsrfViewMiddleware', 49 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 50 | 'django.contrib.messages.middleware.MessageMiddleware', 51 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 52 | ) 53 | 54 | ROOT_URLCONF = 'test_app.dj22.urls' 55 | 56 | 57 | # Database 58 | # https://docs.djangoproject.com/en/dev/ref/settings/#databases 59 | 60 | DATABASES = { 61 | 'default': { 62 | 'ENGINE': 'django.db.backends.sqlite3', 63 | 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), 64 | 'TEST': { 65 | 'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'), 66 | }, 67 | } 68 | } 69 | 70 | # Internationalization 71 | # https://docs.djangoproject.com/en/dev/topics/i18n/ 72 | 73 | LANGUAGE_CODE = 'en-us' 74 | 75 | TIME_ZONE = 'UTC' 76 | 77 | USE_I18N = True 78 | 79 | USE_L10N = True 80 | 81 | USE_TZ = True 82 | 83 | 84 | # Static files (CSS, JavaScript, Images) 85 | # https://docs.djangoproject.com/en/dev/howto/static-files/ 86 | 87 | STATIC_URL = '/static/' 88 | -------------------------------------------------------------------------------- /event_consumer/conf/settings.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | import os 3 | from typing import Optional, Callable 4 | 5 | try: 6 | from django.conf import settings 7 | except ImportError: 8 | settings = None 9 | 10 | CONFIG_NAMESPACE: str = "EVENT_CONSUMER" 11 | 12 | # safety var to prevent accidentally enabling the handlers in `test_utils.handlers` 13 | # set to True and then import the module to enable them 14 | TEST_ENABLED: bool = getattr(settings, f"{CONFIG_NAMESPACE}_TEST_ENABLED", False) 15 | 16 | # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer 17 | SERIALIZER: str = getattr(settings, f"{CONFIG_NAMESPACE}_SERIALIZER", 'json') 18 | ACCEPT = [SERIALIZER] 19 | 20 | QUEUE_NAME_PREFIX: str = getattr(settings, f"{CONFIG_NAMESPACE}_QUEUE_NAME_PREFIX", '') 21 | 22 | MAX_RETRIES: int = getattr(settings, f"{CONFIG_NAMESPACE}_MAX_RETRIES", 4) 23 | 24 | # By default will use `AMQPRetryHandler.backoff`, otherwise supply your own. 25 | # Should accept a single arg and return a delay time (seconds). 
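# For illustration only (this exact snippet is not part of the library), a
# capped exponential backoff with jitter could be supplied from your app
# config module, e.g.:
#
#     import random
#     EVENT_CONSUMER_BACKOFF_FUNC = (
#         lambda retry_count: min(random.uniform(0, 2 ** retry_count), 300.0)
#     )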
26 | BACKOFF_FUNC: Optional[Callable[[int], float]] = getattr( 27 | settings, f"{CONFIG_NAMESPACE}_BACKOFF_FUNC", None 28 | ) 29 | 30 | RETRY_HEADER: str = getattr( 31 | settings, f"{CONFIG_NAMESPACE}_RETRY_HEADER", 'x-retry-count' 32 | ) 33 | 34 | # Set the consumer prefetch limit 35 | PREFETCH_COUNT: int = getattr(settings, f"{CONFIG_NAMESPACE}_PREFETCH_COUNT", 1) 36 | 37 | # to set TTL for archived message (milliseconds) 38 | twenty_four_days = int(timedelta(days=24).total_seconds() * 1000) 39 | ARCHIVE_EXPIRY: int = getattr( 40 | settings, f"{CONFIG_NAMESPACE}_ARCHIVE_EXPIRY", twenty_four_days 41 | ) 42 | ARCHIVE_MAX_LENGTH: int = getattr( 43 | settings, f"{CONFIG_NAMESPACE}_ARCHIVE_MAX_LENGTH", 1000000 44 | ) 45 | 46 | ARCHIVE_QUEUE_ARGS = getattr(settings, f"{CONFIG_NAMESPACE}_ARCHIVE_QUEUE_ARGS", { 47 | "x-message-ttl": ARCHIVE_EXPIRY, # Messages dropped after this 48 | "x-max-length": ARCHIVE_MAX_LENGTH, # Maximum size of the queue 49 | "x-queue-mode": "lazy", # Keep messages on disk (reqs. rabbitmq 3.6.0+) 50 | }) 51 | 52 | USE_DJANGO: bool = getattr(settings, f"{CONFIG_NAMESPACE}_USE_DJANGO", False) 53 | 54 | EXCHANGES: dict[str, dict[str, str]] = getattr( 55 | settings, f"{CONFIG_NAMESPACE}_EXCHANGES", {} 56 | ) 57 | # EXCHANGES = { 58 | # 'default': { # a reference name for this config, used when attaching handlers 59 | # 'name': 'data', # actual name of exchange in RabbitMQ 60 | # 'type': 'topic', # an AMQP exchange type 61 | # }, 62 | # ... 63 | # } 64 | 65 | BROKER_URL = 'amqp://{0}:5672'.format(os.getenv('BROKER_HOST', 'localhost')) 66 | -------------------------------------------------------------------------------- /tests/test_retry_handler.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | import mock 4 | 5 | from event_consumer.conf import settings 6 | from event_consumer import handlers as ec 7 | 8 | from .base import BaseRetryHandlerIntegrationTest 9 | 10 | 11 | class AMQPRetryHandlerIntegrationTest(BaseRetryHandlerIntegrationTest): 12 | def test_wrapped_func(self): 13 | """Should run the wrapped function when a message arrives with its routing key""" 14 | with mock.patch.object(self.handler, 'func') as f: 15 | body = self.body() 16 | 17 | self.producer.publish(body) 18 | self.connection.drain_events(timeout=0.3) 19 | f.assert_called_once_with(body) 20 | self.assertEqual(len(self.archives), 0) 21 | 22 | # no retries: 23 | e = None 24 | try: 25 | self.connection.drain_events(timeout=0.3) 26 | except socket.timeout as exc: 27 | e = exc 28 | self.assertIsNotNone( 29 | e, msg="e=None here means task was unexpectedly retried" 30 | ) 31 | f.call_count = 1 32 | 33 | def test_wrapped_func_raises_exception(self): 34 | """Should archive messages that have been retried too many times""" 35 | expected_attempts = settings.MAX_RETRIES + 1 36 | 37 | with mock.patch.object(self.handler, 'func') as f: 38 | f.side_effect = Exception('This should be retried') 39 | body = self.body() 40 | 41 | self.producer.publish(body) 42 | 43 | for _ in range(expected_attempts): 44 | self.connection.drain_events(timeout=0.3) 45 | self.assertEqual(len(self.archives), 0) 46 | 47 | # Expect the final attempt to have placed the message in the 48 | # archive which we now drain... 
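            # (once MAX_RETRIES is exceeded the handler publishes the message
            # to the "archive" queue rather than re-queueing it - see
            # "Queue layout" in README.rst)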
49 | self.connection.drain_events(timeout=0.3) 50 | self.assertEqual(len(self.archives), 1) 51 | 52 | archived_body, archived_message = self.archives[0] 53 | self.assertEqual(body, archived_body) 54 | self.assertEqual( 55 | archived_message.headers[settings.RETRY_HEADER], settings.MAX_RETRIES 56 | ) 57 | self.assertEqual(f.call_count, expected_attempts) 58 | 59 | def test_wrapped_func_raises_permanent_failure_exception(self): 60 | """Should catch PermanentFailure exceptions and archive these messages""" 61 | with mock.patch.object(self.handler, 'func') as f: 62 | f.side_effect = ec.PermanentFailure('This should be archived first go') 63 | body = self.body() 64 | 65 | self.assertEqual(len(self.archives), 0) 66 | 67 | # Publish and run handler once 68 | self.producer.publish(body) 69 | self.connection.drain_events() 70 | 71 | # Drain from archive once 72 | self.connection.drain_events() 73 | self.assertEqual(len(self.archives), 1) 74 | 75 | archived_body, archived_message = self.archives[0] 76 | self.assertEqual(body, archived_body) 77 | self.assertEqual(f.call_count, 1) 78 | -------------------------------------------------------------------------------- /tests/base.py: -------------------------------------------------------------------------------- 1 | import random 2 | import unittest 3 | 4 | from six.moves import string_letters 5 | 6 | import kombu.common as common 7 | import kombu 8 | 9 | from event_consumer.conf import settings 10 | from event_consumer.handlers import AMQPRetryHandler, AMQPRetryConsumerStep 11 | 12 | 13 | def random_body(): 14 | return dict(body=''.join([random.choice(string_letters) for _ in range(25)])) 15 | 16 | 17 | class BaseRetryHandlerIntegrationTest(unittest.TestCase): 18 | # Must not collide with real queue names! 19 | routing_key = 'RetryHandlerIntegrationTest' 20 | exchange = 'default' # see settings.EXCHANGES 21 | 22 | body = staticmethod(random_body) 23 | 24 | def setUp(self): 25 | super(BaseRetryHandlerIntegrationTest, self).setUp() 26 | 27 | # NOTE: 28 | # must be a real rabbitmq instance, we rely on rabbitmq 29 | # features (dead-letter exchange) for our retry queue logic 30 | self.connection = kombu.Connection( 31 | settings.BROKER_URL, 32 | connect_timeout=1, 33 | ) 34 | self.connection.ensure_connection() 35 | self.connection.connect() 36 | self.channel = self.connection.channel() 37 | 38 | self.handler = AMQPRetryHandler( 39 | self.channel, 40 | routing_key=self.routing_key, 41 | queue=self.routing_key, 42 | exchange=self.exchange, 43 | queue_arguments={}, 44 | func=lambda body: None, 45 | backoff_func=lambda attempt: 0, 46 | ) 47 | self.handler.declare_queues() 48 | 49 | queues = [ 50 | self.handler.worker_queue, 51 | self.handler.retry_queue, 52 | self.handler.archive_queue, 53 | ] 54 | for queue in queues: 55 | queue.purge() 56 | 57 | self.archive_consumer = kombu.Consumer( 58 | channel=self.channel, 59 | queues=[self.handler.archive_queue], 60 | callbacks=[self.handle_archive], 61 | ) 62 | 63 | for consumer in [self.handler.consumer, self.archive_consumer]: 64 | consumer.consume() 65 | 66 | self.producer = kombu.Producer( 67 | self.channel, 68 | exchange=self.handler.exchanges[self.handler.exchange], 69 | routing_key=self.routing_key, 70 | serializer='json', 71 | ) 72 | self.archives = [] 73 | 74 | def tearDown(self): 75 | queues = [ 76 | self.handler.worker_queue, 77 | self.handler.retry_queue, 78 | self.handler.archive_queue, 79 | ] 80 | 81 | for consumer in [self.handler.consumer, self.archive_consumer]: 82 | 
common.ignore_errors(self.connection, consumer.cancel) 83 | 84 | for queue in queues: 85 | # Carefully delete test queues which must be empty and have no consumers running. 86 | queue.delete( 87 | if_unused=True, 88 | if_empty=True, 89 | ) 90 | 91 | for name, exchange_settings in settings.EXCHANGES.items(): 92 | self.handler.exchanges[name].delete(if_unused=True) 93 | 94 | self.connection.close() 95 | super(BaseRetryHandlerIntegrationTest, self).tearDown() 96 | 97 | def handle_archive(self, body, message): 98 | self.archives.append((body, message)) 99 | message.ack() 100 | 101 | 102 | class BaseConsumerIntegrationTest(unittest.TestCase): 103 | exchange = 'default' # see settings.EXCHANGES 104 | 105 | body = staticmethod(random_body) 106 | 107 | def setUp(self): 108 | super(BaseConsumerIntegrationTest, self).setUp() 109 | 110 | # NOTE: 111 | # must be a real rabbitmq instance, we rely on rabbitmq 112 | # features (dead-letter exchange) for our retry queue logic 113 | self.connection = kombu.Connection( 114 | settings.BROKER_URL, 115 | connect_timeout=1, 116 | ) 117 | self.connection.ensure_connection() 118 | self.connection.connect() 119 | self.channel = self.connection.channel() 120 | 121 | def configure_handlers(self): 122 | """ 123 | Call from inside the test, *after* you have decorated your message handlers 124 | """ 125 | step = AMQPRetryConsumerStep(None) 126 | self.handlers = step.get_handlers(channel=self.channel) 127 | for handler in self.handlers: 128 | handler.declare_queues() 129 | 130 | queues = [ 131 | handler.worker_queue, 132 | handler.retry_queue, 133 | handler.archive_queue, 134 | ] 135 | for queue in queues: 136 | queue.purge() 137 | 138 | handler.consumer.consume() 139 | 140 | def tearDown(self): 141 | for handler in self.handlers: 142 | common.ignore_errors(self.connection, handler.consumer.cancel) 143 | 144 | for handler in self.handlers: 145 | # Carefully delete test queues and exchanges 146 | # We require them to be empty and unbound to be sure all our cleanup 147 | # is being done correctly (i.e. 
nothing got left behind by mistake) 148 | queues = [ 149 | handler.worker_queue, 150 | handler.retry_queue, 151 | handler.archive_queue, 152 | ] 153 | for queue in queues: 154 | queue.delete( 155 | if_unused=True, 156 | if_empty=True, 157 | ) 158 | 159 | for handler in self.handlers: 160 | for name, exchange_settings in settings.EXCHANGES.items(): 161 | handler.exchanges[name].delete(if_unused=True) 162 | 163 | self.connection.close() 164 | super(BaseConsumerIntegrationTest, self).tearDown() 165 | 166 | def get_producer(self, handler, routing_key=None): 167 | return kombu.Producer( 168 | handler.channel, 169 | exchange=handler.exchanges[handler.exchange], 170 | routing_key=handler.routing_key if routing_key is None else routing_key, 171 | serializer='json', 172 | ) 173 | 174 | def get_handlers_for_key(self, routing_key): 175 | return [ 176 | handler for handler in self.handlers if handler.routing_key == routing_key 177 | ] 178 | -------------------------------------------------------------------------------- /tests/test_retry_handler_django.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | from django.db import transaction 4 | import mock 5 | import pytest 6 | 7 | from test_app.factories import UserFactory 8 | from test_app.models import User 9 | 10 | from .base import BaseRetryHandlerIntegrationTest 11 | 12 | 13 | def cleanup(): 14 | User.objects.all().delete() 15 | 16 | 17 | try: 18 | transaction_context = transaction.atomic 19 | except AttributeError: 20 | transaction_context = transaction.commit_on_success 21 | 22 | 23 | @pytest.mark.django_db(transaction=True) 24 | class DjangoDBTransactionIntegrationTest(BaseRetryHandlerIntegrationTest): 25 | def setUp(self): 26 | self.addCleanup(cleanup) 27 | super(DjangoDBTransactionIntegrationTest, self).setUp() 28 | 29 | def test_wrapped_func_raises_database_error_implicit_transaction(self): 30 | """ 31 | If consumer encounters a DatabaseError it should be able to re-enqueue 32 | the message and continue successfully, without falling foul of: 33 | 'current transaction is aborted, commands ignored until end of transaction block' 34 | """ 35 | user = UserFactory() 36 | 37 | def broken(*args, **kwargs): 38 | return UserFactory(username=user.username) # IntegrityError 39 | 40 | def good(*args, **kwargs): 41 | return UserFactory() 42 | 43 | self.assertEqual(len(User.objects.all()), 1) 44 | 45 | with mock.patch.object(self.handler, 'func') as f: 46 | # Publish and run handler once, with good func, to prove good works 47 | 48 | f.side_effect = good 49 | body = self.body() 50 | 51 | self.producer.publish(body) 52 | self.connection.drain_events(timeout=0.3) 53 | f.assert_called_once_with(body) 54 | self.assertEqual(len(self.archives), 0) 55 | 56 | # no retries: 57 | e1 = None 58 | try: 59 | self.connection.drain_events(timeout=0.3) 60 | except socket.timeout as exc: 61 | e1 = exc 62 | self.assertIsNotNone( 63 | e1, msg="e1=None here means task was unexpectedly retried" 64 | ) 65 | f.call_count = 1 66 | 67 | with mock.patch.object(self.handler, 'func') as f: 68 | # Publish and run handler once, with broken func 69 | # (expecting retry enqueued due to IntegrityError - if we 70 | # don't handle it this leads to hanging transaction context 71 | # causing subsequent failures) 72 | 73 | f.side_effect = broken 74 | body = self.body() 75 | self.assertEqual(len(self.archives), 0) 76 | 77 | self.producer.publish(body) 78 | self.connection.drain_events(timeout=0.3) 79 | f.assert_called_once_with(body) 80 | 
self.assertEqual(len(self.archives), 0) 81 | 82 | with mock.patch.object(self.handler, 'func') as f: 83 | # attempt to process the retry with good func - should succeed 84 | 85 | f.side_effect = good 86 | 87 | self.connection.drain_events(timeout=0.3) 88 | f.assert_called_once_with(body) # previous body, i.e. retry 89 | self.assertEqual(len(self.archives), 0) 90 | 91 | # no further retries: 92 | e2 = None 93 | try: 94 | self.connection.drain_events(timeout=0.3) 95 | except socket.timeout as exc: 96 | e2 = exc 97 | self.assertIsNotNone( 98 | e2, msg="e2=None here means task was unexpectedly retried" 99 | ) 100 | f.call_count = 1 101 | 102 | self.assertEqual(len(User.objects.all()), 3) 103 | 104 | def test_wrapped_func_raises_database_error_manual_transaction(self): 105 | """ 106 | Ensure that consumer commit/rollback, to avoid problem with Django 107 | implicit transactions, does not break code that manages its own tx 108 | """ 109 | user = UserFactory() 110 | 111 | def broken(*args, **kwargs): 112 | with transaction_context(): 113 | return UserFactory(username=user.username) # IntegrityError 114 | 115 | def good(*args, **kwargs): 116 | with transaction_context(): 117 | return UserFactory() 118 | 119 | self.assertEqual(len(User.objects.all()), 1) 120 | 121 | with mock.patch.object(self.handler, 'func') as f: 122 | # Publish and run handler once, with good func, to prove good works 123 | 124 | f.side_effect = good 125 | body = self.body() 126 | 127 | self.producer.publish(body) 128 | self.connection.drain_events(timeout=0.3) 129 | f.assert_called_once_with(body) 130 | self.assertEqual(len(self.archives), 0) 131 | 132 | # no retries: 133 | e1 = None 134 | try: 135 | self.connection.drain_events(timeout=0.3) 136 | except socket.timeout as exc: 137 | e1 = exc 138 | self.assertIsNotNone( 139 | e1, msg="e1=None here means task was unexpectedly retried" 140 | ) 141 | f.call_count = 1 142 | 143 | with mock.patch.object(self.handler, 'func') as f: 144 | # Publish and run handler once, with broken func 145 | # (expecting retry enqueued due to IntegrityError - if we 146 | # don't handle it this leads to hanging transaction context 147 | # causing subsequent failures) 148 | 149 | f.side_effect = broken 150 | body = self.body() 151 | self.assertEqual(len(self.archives), 0) 152 | 153 | self.producer.publish(body) 154 | self.connection.drain_events(timeout=0.3) 155 | f.assert_called_once_with(body) 156 | self.assertEqual(len(self.archives), 0) 157 | 158 | with mock.patch.object(self.handler, 'func') as f: 159 | # attempt to process the retry with good func - should succeed 160 | 161 | f.side_effect = good 162 | 163 | self.connection.drain_events(timeout=0.3) 164 | f.assert_called_once_with(body) # previous body, i.e. 
retry
165 |             self.assertEqual(len(self.archives), 0)
166 |
167 |             # no further retries:
168 |             e2 = None
169 |             try:
170 |                 self.connection.drain_events(timeout=0.3)
171 |             except socket.timeout as exc:
172 |                 e2 = exc
173 |             self.assertIsNotNone(
174 |                 e2, msg="e2=None here means task was unexpectedly retried"
175 |             )
176 |             f.call_count = 1
177 |
178 |         self.assertEqual(len(User.objects.all()), 3)
179 |
--------------------------------------------------------------------------------
/tests/test_consume_handler.py:
--------------------------------------------------------------------------------
1 | import socket
2 |
3 | from flaky import flaky
4 | import mock
5 |
6 | from event_consumer import message_handler
7 | from event_consumer import handlers as ec
8 |
9 | from .base import BaseConsumerIntegrationTest
10 | from unittest.mock import patch
11 |
12 |
13 | class ConsumeMessageHandlerTest(BaseConsumerIntegrationTest):
14 |     @flaky(max_runs=5, min_passes=5)
15 |     def test_consume_basic(self):
16 |         """
17 |         Should run the wrapped function when a message arrives with its routing key.
18 |         """
19 |         with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:
20 |             f1 = message_handler('my.routing.key1')(
21 |                 mock.MagicMock(__name__='mock_handler1')
22 |             )
23 |             f2 = message_handler('my.routing.key2')(
24 |                 mock.MagicMock(__name__='mock_handler2')
25 |             )
26 |
27 |             assert len(reg) == 2
28 |
29 |             self.configure_handlers()
30 |
31 |             assert len(self.handlers) == len(reg)
32 |
33 |             h1 = self.get_handlers_for_key('my.routing.key1')[0]
34 |             h2 = self.get_handlers_for_key('my.routing.key2')[0]
35 |
36 |             p1 = self.get_producer(h1)
37 |             p2 = self.get_producer(h2)
38 |             body1 = self.body()
39 |             body2 = self.body()
40 |
41 |             p1.publish(body1)
42 |             p2.publish(body2)
43 |             for _ in range(2):
44 |                 self.connection.drain_events(timeout=0.3)
45 |
46 |             f1.assert_called_once_with(body1)
47 |             f2.assert_called_once_with(body2)
48 |
49 |             # no retries:
50 |             e = None
51 |             try:
52 |                 self.connection.drain_events(timeout=0.3)
53 |             except socket.timeout as exc:
54 |                 e = exc
55 |             self.assertIsNotNone(
56 |                 e, msg="e=None here means task was unexpectedly retried"
57 |             )
58 |             # no further calls
59 |             f1.call_count = 1
60 |             f2.call_count = 1
61 |
62 |     @flaky(max_runs=5, min_passes=5)
63 |     def test_consume_custom_queue_name(self):
64 |         """
65 |         Should run the wrapped function when a message arrives with its routing key.
66 |         Test that we can bind a handler to a custom queue name, which
67 |         requires a named exchange.
68 | """ 69 | 70 | with ( 71 | mock.patch.object(ec, 'REGISTRY', new=dict()) as reg, 72 | patch( 73 | 'event_consumer.handlers.settings.EXCHANGES', 74 | { 75 | 'custom': { 76 | 'name': 'custom', 77 | 'type': 'topic', 78 | } 79 | }, 80 | ), 81 | ): 82 | # we have to use a named exchange to be able to bind a custom queue name 83 | f1 = message_handler( 84 | 'my.routing.key1', queue='custom_queue', exchange='custom' 85 | )(mock.MagicMock(__name__='mock_handler1')) 86 | 87 | assert len(reg) == 1 88 | 89 | self.configure_handlers() 90 | 91 | assert len(self.handlers) == len(reg) 92 | 93 | h1 = self.get_handlers_for_key('my.routing.key1')[0] 94 | 95 | p1 = self.get_producer(h1) 96 | body1 = self.body() 97 | 98 | p1.publish(body1) 99 | self.connection.drain_events(timeout=0.3) 100 | 101 | f1.assert_called_once_with(body1) 102 | 103 | # no retries: 104 | e = None 105 | try: 106 | self.connection.drain_events(timeout=0.3) 107 | except socket.timeout as exc: 108 | e = exc 109 | self.assertIsNotNone( 110 | e, msg="e=None here means task was unexpectedly retried" 111 | ) 112 | # no further calls 113 | f1.call_count = 1 114 | 115 | @flaky(max_runs=5, min_passes=5) 116 | def test_consume_wildcard_route(self): 117 | """ 118 | Should run the wrapped function when a message arrives with its routing key. 119 | Test that we can connect multiple routing keys on the same queue and the 120 | appropriate handler will be called in each case. 121 | """ 122 | with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg, patch( 123 | 'event_consumer.handlers.settings.EXCHANGES', 124 | { 125 | 'custom': { 126 | 'name': 'custom', 127 | 'type': 'topic', 128 | } 129 | }, 130 | ): 131 | f1 = message_handler('my.routing.*', exchange='custom')( 132 | mock.MagicMock(__name__='mock_handler1') 133 | ) 134 | 135 | assert len(reg) == 1 136 | 137 | self.configure_handlers() 138 | 139 | assert len(self.handlers) == len(reg) 140 | 141 | h1 = self.get_handlers_for_key('my.routing.*')[0] 142 | 143 | p1 = self.get_producer(h1, 'my.routing.key1') 144 | p2 = self.get_producer(h1, 'my.routing.key2') 145 | body1 = self.body() 146 | body2 = self.body() 147 | 148 | p1.publish(body1) 149 | p2.publish(body2) 150 | for _ in range(2): 151 | self.connection.drain_events(timeout=0.3) 152 | 153 | f1.assert_has_calls([mock.call(body1), mock.call(body2)], any_order=True) 154 | 155 | # no retries: 156 | e = None 157 | try: 158 | self.connection.drain_events(timeout=0.3) 159 | except socket.timeout as exc: 160 | e = exc 161 | self.assertIsNotNone( 162 | e, msg="e=None here means task was unexpectedly retried" 163 | ) 164 | # no further calls 165 | f1.call_count = 2 166 | 167 | @flaky(max_runs=5, min_passes=5) 168 | def test_consume_multiple_routes(self): 169 | """ 170 | Should run the wrapped function when a message arrives with its routing key. 171 | Test that we can connect multiple routing keys on the same queue and the 172 | appropriate handler will be called in each case. 
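        (A single decorated function registered under several routing keys
        results in one handler per key - two registry entries here.)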
173 |         """
174 |         with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg, patch(
175 |             'event_consumer.handlers.settings.EXCHANGES',
176 |             {
177 |                 'custom': {
178 |                     'name': 'custom',
179 |                     'type': 'topic',
180 |                 }
181 |             },
182 |         ):
183 |             decorator = message_handler(
184 |                 ['my.routing.key1', 'my.routing.key2'],
185 |                 exchange='custom',
186 |             )
187 |             f1 = decorator(mock.MagicMock(__name__='mock_handler1'))
188 |
189 |             assert len(reg) == 2
190 |
191 |             self.configure_handlers()
192 |
193 |             assert len(self.handlers) == len(reg)
194 |
195 |             h1 = self.get_handlers_for_key('my.routing.key1')[0]
196 |             h2 = self.get_handlers_for_key('my.routing.key2')[0]
197 |
198 |             p1 = self.get_producer(h1)
199 |             p2 = self.get_producer(h2)
200 |             body1 = self.body()
201 |             body2 = self.body()
202 |
203 |             p1.publish(body1)
204 |             p2.publish(body2)
205 |             for _ in range(2):
206 |                 self.connection.drain_events(timeout=0.3)
207 |
208 |             f1.assert_has_calls([mock.call(body1), mock.call(body2)], any_order=True)
209 |
210 |             # no retries:
211 |             e = None
212 |             try:
213 |                 self.connection.drain_events(timeout=0.3)
214 |             except socket.timeout as exc:
215 |                 e = exc
216 |             self.assertIsNotNone(
217 |                 e, msg="e=None here means task was unexpectedly retried"
218 |             )
219 |             # no further calls
220 |             f1.call_count = 2
221 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | celery-message-consumer
2 | =======================
3 |
4 | |PyPI Version|
5 |
6 | .. |PyPI Version| image:: http://img.shields.io/pypi/v/celery-message-consumer.svg?style=flat
7 |    :target: https://pypi.python.org/pypi/celery-message-consumer/
8 |    :alt: Latest PyPI version
9 |
10 | Tool for using the ``bin/celery`` worker to consume vanilla AMQP
11 | messages (i.e. not Celery tasks)
12 |
13 | While `writing a simple consumer
14 | script `__
15 | using Kombu can be quite easy, the Celery worker provides many features
16 | around process pools, queue/routing connections etc as well as being
17 | known to run reliably over the long term.
18 |
19 | It seems safer to re-use this battle-tested consumer than try to write
20 | our own and have to learn from scratch all the ways that such a thing
21 | can fail.
22 |
23 | Usage
24 | -----
25 |
26 | .. code:: bash
27 |
28 |     pip install celery-message-consumer
29 |
30 |
31 | Handlers
32 | ~~~~~~~~
33 |
34 | In your code, you can define a message handler by decorating a Python
35 | function, in much the same way as you would a Celery task:
36 |
37 | .. code:: python
38 |
39 |     from event_consumer import message_handler
40 |
41 |     @message_handler('my.routing.key')
42 |     def process_message(body):
43 |         # `body` has been deserialized for us by the Celery worker
44 |         print(body)
45 |
46 |     @message_handler(['my.routing.key1', 'my.routing.key2'])
47 |     def process_messages(body):
48 |         ...  # you can register a handler for multiple routing keys
49 |
50 |     @message_handler('my.routing.*')
51 |     def process_all_messages(body):
52 |         ...  # or for wildcard routing keys, if using a 'topic' exchange
53 |
54 | Like a Celery task, the module a handler is defined in must actually get
55 | imported at some point for the handler to be registered.
56 |
57 | A queue (in fact, three queues - see below) will be created to receive
58 | messages matching the routing key.
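
If the defaults don't suit, the decorator also accepts explicit ``queue``
and ``exchange`` arguments; a *named* exchange from ``settings.EXCHANGES``
is required when binding a custom queue name. A minimal sketch, following
the usage exercised in ``tests/test_consume_handler.py`` (the handler name
is illustrative):

.. code:: python

    @message_handler('my.routing.key', queue='custom_queue', exchange='custom')
    def process_custom_queue(body):
        ...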
59 |
60 | Celery
61 | ~~~~~~
62 |
63 | Elsewhere in your code you will need to instantiate a Celery app and
64 | apply our custom 'ConsumerStep' which hooks our message handlers into
65 | the worker. If you are already using Celery *as Celery* in your project
66 | then you probably want separate Celery apps for tasks and for the
67 | message consumer.
68 |
69 | .. code:: python
70 |
71 |     from celery import Celery
72 |     from event_consumer.handlers import AMQPRetryConsumerStep
73 |
74 |     main_app = Celery()
75 |
76 |     consumer_app = Celery()
77 |     consumer_app.steps['consumer'].add(AMQPRetryConsumerStep)
78 |
79 | You will likely want separate config for each app. See
80 | `Celery docs `__.
81 |
82 | In the config for your message consumer app, add the modules containing
83 | your decorated message handler functions to ``CELERY_IMPORTS``, exactly
84 | as you would for Celery tasks - this ensures they get imported and
85 | registered when the worker starts up.
86 |
87 | Then from the command-line, run the Celery worker just like you usually
88 | would, attaching to the consumer app:
89 |
90 | .. code:: bash
91 |
92 |     bin/celery -A myproject.mymodule:consumer_app worker
93 |
94 | Configuration
95 | ~~~~~~~~~~~~~
96 |
97 | Settings are intended to be configured primarily via a Python file, such
98 | as your existing Django ``settings.py`` or Celery ``celeryconfig.py``.
99 | To bootstrap this, there are a couple of env vars to control how config
100 | is loaded:
101 |
102 | - ``EVENT_CONSUMER_APP_CONFIG``
103 |   should be an import path to a Python module, for example:
104 |   ``EVENT_CONSUMER_APP_CONFIG=django.conf.settings``
105 | - ``EVENT_CONSUMER_CONFIG_NAMESPACE``
106 |   Sets the prefix used for loading further config values from env and
107 |   config file. Defaults to ``EVENT_CONSUMER``.
108 |
109 | See the source of ``event_consumer/conf/`` for more details.
110 |
111 | Some useful config keys (all of which are prefixed with
112 | ``EVENT_CONSUMER_`` by default):
113 |
114 | - ``SERIALIZER`` the name of a Celery serializer, e.g.
115 |   ``'json'``. The consumer will only accept messages serialized in this
116 |   format.
117 | - ``QUEUE_NAME_PREFIX`` if using the default queue name (routing-key) then
118 |   this prefix will be added to the queue name. If you supply a custom
119 |   queue name in the handler decorator the prefix will not be applied.
120 | - ``MAX_RETRIES`` defaults to ``4`` (i.e. 1 attempt + 4 retries = 5
121 |   strikes)
122 | - ``BACKOFF_FUNC`` takes a function ``(int) -> float`` which returns
123 |   the retry delay (in seconds) based on the current retry counter for the
124 |   message.
125 | - ``ARCHIVE_EXPIRY`` time in milliseconds to keep messages in the
126 |   "archive" queue, after which the exchange will delete them. Defaults
127 |   to 24 days.
128 | - ``USE_DJANGO`` set to ``True`` if your message handler uses the
129 |   Django db connection, so that the worker is able to cope with the
130 |   dreaded *"current transaction is aborted"* error and continue.
131 | - ``EXCHANGES`` if you need your message handlers to connect their
132 |   queues to specific exchanges then you can provide a dict like:
133 |
134 |   .. code:: python
135 |
136 |       EXCHANGES = {
137 |           # a reference name for this config, used when attaching handlers
138 |           'default': {
139 |               'name': 'data',  # actual name of exchange in RabbitMQ
140 |               'type': 'topic',  # an AMQP exchange type
141 |           },
142 |           'other': {
143 |               ...
144 |           },
145 |           ...
146 |       }
147 |
148 | The ``'default'`` config will be used... by default. You can attach a
149 | handler to a specific exchange when decorating:
150 |
151 | .. code:: python
152 |
153 |     @message_handler('my.routing.key', exchange='other')
154 |     def process_message(body):
155 |         pass
156 |
157 | Queue layout
158 | ------------
159 |
160 | While all of the broker, exchange and queue naming is configurable (see
161 | the source code) this project implements a *very specific queue pattern*.
162 |
163 | Briefly: for each routing key it listens to, the consumer sets up
164 | *three* queues and a 'dead-letter exchange' (DLX).
165 |
166 | #. The "main" message queue
167 | #. If any unhandled exceptions occur, and we have retried fewer than
168 |    ``settings.MAX_RETRIES`` times, the message will be put on the "retry"
169 |    queue with a TTL. After the TTL expires, the DLX will put the message
170 |    back on the main queue.
171 | #. If all retries are exhausted (or ``PermanentFailure`` is raised) then
172 |    the consumer will put the message on the "archive" queue. This gives
173 |    an opportunity for someone to manually retry the archived messages,
174 |    perhaps after a code fix has been deployed.
175 |
176 | | You will of course note that this is *totally different and separate*
177 |   from Celery's own ``task.retry`` mechanism.
178 | | **Pros:** matches the pattern we were already using for non-Celery,
179 |   non-Python apps; the "archive" queue provides an extra safety net.
180 | | **Cons:** relies on a RabbitMQ-specific feature, and more queues (more
181 |   complicated).
182 |
183 | Compatibility
184 | -------------
185 |
186 | **Only** the RabbitMQ transport is supported.
187 |
188 | We depend on Celery and Kombu, whose versioning is loosely in step
189 | (Celery 5.x pairs with Kombu 5.x). We currently require and test
190 | against v5 of both - see ``requirements.txt``.
191 |
192 | Django is not required, but when used we have some extra integration
193 | which is needed if your event handlers use the Django db connection.
194 | This must be enabled if required via the ``settings.USE_DJANGO`` flag.
195 |
196 |
197 | Running the tests
198 | -----------------
199 |
200 |
201 | py.test (single combination of dependency versions)
202 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
203 |
204 | The tests can be run locally, allowing for debugging of
205 | errors that occur.
206 |
207 | We rely on some RabbitMQ features for our retry queues so we need a
208 | rabbit instance to test against. A ``docker-compose.yml`` file is
209 | provided.
210 |
211 | .. code:: bash
212 |
213 |     docker-compose up -d
214 |     export BROKER_HOST=0.0.0.0
215 |
216 | (adjust the last line to suit your local Docker installation)
217 |
218 | The RabbitMQ management web UI is available to aid in debugging queue issues:
219 |
220 | .. code:: bash
221 |
222 |     http://{BROKER_HOST}:15672/
223 |
224 |
225 | You will need to create a virtualenv; then you can install everything via:
226 |
227 | .. code:: bash
228 |
229 |     pyenv virtualenv 3.11.1 celery-message-consumer
230 |     pip install -r requirements-test.txt
231 |
232 | Set an env var to point to the target Django version's settings in the test
233 | app (for Django-dependent tests) and for general app settings:
234 |
235 | .. code:: bash
236 |
237 |     export DJANGO_SETTINGS_MODULE=test_app.dj111.settings
238 |     export EVENT_CONSUMER_APP_CONFIG=test_app.settings
239 |
240 | Now we can run the tests:
241 |
242 | .. code:: bash
243 |
244 |     PYTHONPATH=. 
py.test -v -s --pdb tests/ 245 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 |
--------------------------------------------------------------------------------
/tests/test_consumer_step.py:
--------------------------------------------------------------------------------
import pytest

from unittest import mock
from unittest.mock import patch

from event_consumer import message_handler
from event_consumer import handlers as ec
from event_consumer.errors import InvalidQueueRegistration
from event_consumer.types import QueueKey


def test_get_handlers_with_defaults():
    """
    Should build handlers from tasks decorated with `@message_handler`
    and use defaults for routing key and exchange if none provided
    """
    with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:

        @message_handler('my.routing.key1')
        def f1(body):
            return None

        @message_handler('my.routing.key2')
        def f2(body):
            return None

        assert len(reg) == 2

        handler_reg1 = reg[QueueKey(queue='my.routing.key1', exchange='default')]
        assert handler_reg1.handler is f1
        assert handler_reg1.routing_key == 'my.routing.key1'
        assert handler_reg1.queue_arguments == {}

        handler_reg2 = reg[QueueKey(queue='my.routing.key2', exchange='default')]
        assert handler_reg2.handler is f2
        assert handler_reg2.routing_key == 'my.routing.key2'
        assert handler_reg2.queue_arguments == {}

        step = ec.AMQPRetryConsumerStep(None)
        handlers = step.get_handlers(channel=mock.MagicMock())

        assert len(handlers) == len(reg)

        for handler in handlers:
            assert isinstance(handler, ec.AMQPRetryHandler)
            assert len(handler.consumer.queues) == 1
            assert len(handler.consumer.callbacks) == 1
            assert isinstance(handler.consumer.callbacks[0], ec.AMQPRetryHandler)
            key = QueueKey(queue=handler.queue, exchange=handler.exchange)
            assert handler.consumer.callbacks[0].func is reg[key].handler


def test_get_handlers_queue_prefix():
    """
    Should build handlers from tasks decorated with `@message_handler`
    and prepend `QUEUE_NAME_PREFIX` to the queue name (a named exchange
    is required whenever the prefix is configured)
    """
    with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg, patch(
        'event_consumer.handlers.settings.QUEUE_NAME_PREFIX', 'myapp:'
    ), patch(
        'event_consumer.handlers.settings.EXCHANGES',
        {
            'custom': {
                'name': 'custom',
                'type': 'topic',
            }
        },
    ):
        # named exchange is required if using QUEUE_NAME_PREFIX
        with pytest.raises(InvalidQueueRegistration):

            @message_handler('my.routing.key1')
            def bad(body):
                return None

        @message_handler('my.routing.key1', exchange='custom')
        def f1(body):
            return None

        @message_handler('my.routing.key2', exchange='custom')
        def f2(body):
            return None

        assert len(reg) == 2

        handler_reg1 = reg[
            QueueKey(queue='myapp:my.routing.key1', exchange='custom')
        ]
        assert handler_reg1.handler is f1
        assert handler_reg1.routing_key == 'my.routing.key1'
        assert handler_reg1.queue_arguments == {}

        handler_reg2 = reg[
            QueueKey(queue='myapp:my.routing.key2', exchange='custom')
        ]
        assert handler_reg2.handler is f2
        assert handler_reg2.routing_key == 'my.routing.key2'
        assert handler_reg2.queue_arguments == {}

        step = ec.AMQPRetryConsumerStep(None)
        handlers = step.get_handlers(channel=mock.MagicMock())

        assert len(handlers) == len(reg)

        for handler in handlers:
            assert isinstance(handler, ec.AMQPRetryHandler)
            assert len(handler.consumer.queues) == 1
            assert len(handler.consumer.callbacks) == 1
            assert isinstance(handler.consumer.callbacks[0], ec.AMQPRetryHandler)
            key = QueueKey(queue=handler.queue, exchange=handler.exchange)
            assert handler.consumer.callbacks[0].func is reg[key].handler


def test_get_handlers_with_queue_and_exchange():
    """
    Should build handlers from tasks decorated with `@message_handler`
    using the specified routing key, queue and exchange
    """
    with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg, patch(
        'event_consumer.handlers.settings.EXCHANGES',
        {'my.exchange1': {}, 'my.exchange2': {}},
    ):
        # named exchange is required if using custom queue name
        with pytest.raises(InvalidQueueRegistration):

            @message_handler('my.routing.key1', 'my.queue1')
            def bad(body):
                return None

        @message_handler('my.routing.key1', 'my.queue1', 'my.exchange1')
        def f1(body):
            return None

        @message_handler('my.routing.key2', 'my.queue2', 'my.exchange1')
        def f2(body):
            return None

        # can register same queue name on different exchange
        @message_handler('my.routing.key2', 'my.queue2', 'my.exchange2')
        def f3(body):
            return None

        assert len(reg) == 3

        handler_reg1 = reg[QueueKey(queue='my.queue1', exchange='my.exchange1')]
        assert handler_reg1.handler is f1
        assert handler_reg1.routing_key == 'my.routing.key1'
        assert handler_reg1.queue_arguments == {}

        handler_reg2 = reg[QueueKey(queue='my.queue2', exchange='my.exchange1')]
        assert handler_reg2.handler is f2
        assert handler_reg2.routing_key == 'my.routing.key2'
        assert handler_reg2.queue_arguments == {}

        handler_reg3 = reg[QueueKey(queue='my.queue2', exchange='my.exchange2')]
        assert handler_reg3.handler is f3
        assert handler_reg3.routing_key == 'my.routing.key2'
        assert handler_reg3.queue_arguments == {}

        step = ec.AMQPRetryConsumerStep(None)
        handlers = step.get_handlers(channel=mock.MagicMock())

        assert len(handlers) == len(reg)

        for handler in handlers:
            assert isinstance(handler, ec.AMQPRetryHandler)
            assert len(handler.consumer.queues) == 1
            assert len(handler.consumer.callbacks) == 1
            assert isinstance(handler.consumer.callbacks[0], ec.AMQPRetryHandler)
            key = QueueKey(queue=handler.queue, exchange=handler.exchange)
            assert handler.consumer.callbacks[0].func is reg[key].handler


def test_get_handlers_with_queue_arguments():
    """
    Should build handlers from tasks decorated with `@message_handler`
    and pass the `queue_arguments` through to `kombu.Queue` constructor.
    """
    with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:

        @message_handler('my.routing.key1', queue_arguments={'x-fake-header': 'wtf'})
        def f1(body):
            return None

        handler_reg1 = reg[QueueKey(queue='my.routing.key1', exchange='default')]
        assert handler_reg1.handler is f1
        assert handler_reg1.routing_key == 'my.routing.key1'
        assert handler_reg1.queue_arguments == {'x-fake-header': 'wtf'}

        with mock.patch('kombu.Queue') as queue_cls:
            step = ec.AMQPRetryConsumerStep(None)
            handlers = step.get_handlers(channel=mock.MagicMock())

        assert len(reg) == 1
        assert len(handlers) == len(reg)

        handler = handlers[0]
        assert isinstance(handler, ec.AMQPRetryHandler)
        assert isinstance(handler.worker_queue, mock.MagicMock)
        # the first kombu.Queue() call constructs the worker queue; check that
        # the decorator's queue_arguments were passed through to it
        worker_queue_kwargs = queue_cls.call_args_list[0][1]
        assert worker_queue_kwargs['queue_arguments'] == {'x-fake-header': 'wtf'}


def test_get_handlers_no_exchange():
    """
    If an exchange is specified in the `message_handler` decorator it
    must have been configured in the settings.
    """
    with mock.patch.object(ec, 'REGISTRY', new=dict()):

        @message_handler('my.routing.key1', exchange='nonexistent')
        def f1(body):
            return None

        with pytest.raises(ec.NoExchange):
            step = ec.AMQPRetryConsumerStep(None)
            step.get_handlers(channel=mock.MagicMock())


def test_get_handlers_same_queue_name_and_exchange():
    """
    Attempt to attach handler with same queue name + exchange should fail.
    """
    with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg, patch(
        'event_consumer.handlers.settings.EXCHANGES',
        {
            'custom': {
                'name': 'custom',
                'type': 'topic',
            }
        },
    ):

        @message_handler('my.routing.key1', queue='custom_queue', exchange='custom')
        def f1(body):
            return None

        with pytest.raises(InvalidQueueRegistration):

            @message_handler('my.routing.key2', queue='custom_queue', exchange='custom')
            def f2(body):
                return None

        assert len(reg) == 1

        handler_reg1 = reg[QueueKey(queue='custom_queue', exchange='custom')]
        assert handler_reg1.handler is f1
        assert handler_reg1.routing_key == 'my.routing.key1'
        assert handler_reg1.queue_arguments == {}

        step = ec.AMQPRetryConsumerStep(None)
        handlers = step.get_handlers(channel=mock.MagicMock())

        assert len(handlers) == len(reg)

        for handler in handlers:
            assert isinstance(handler, ec.AMQPRetryHandler)
            assert len(handler.consumer.queues) == 1
            assert len(handler.consumer.callbacks) == 1
            assert isinstance(handler.consumer.callbacks[0], ec.AMQPRetryHandler)
            key = QueueKey(queue=handler.queue, exchange=handler.exchange)
            assert handler.consumer.callbacks[0].func is reg[key].handler


def test_get_handlers_with_multiple_routes():
    """
    Can connect the handler to multiple routing keys, each having a queue.
    """
    with mock.patch.object(ec, 'REGISTRY', new=dict()) as reg:
        # custom queue name is not possible with multiple routes, even with named exchange
        with pytest.raises(InvalidQueueRegistration), patch(
            'event_consumer.handlers.settings.EXCHANGES',
            {'my.exchange1': {}, 'my.exchange2': {}},
        ):

            @message_handler(
                ['my.routing.key1', 'my.routing.key2'], 'my.queue1', 'my.exchange1'
            )
            def bad(body):
                return None

        @message_handler(['my.routing.key1', 'my.routing.key2'])
        def f1(body):
            return None

        assert len(reg) == 2

        handler_reg1 = reg[QueueKey(queue='my.routing.key1', exchange='default')]
        assert handler_reg1.handler is f1
        assert handler_reg1.routing_key == 'my.routing.key1'
        assert handler_reg1.queue_arguments == {}

        handler_reg2 = reg[QueueKey(queue='my.routing.key2', exchange='default')]
        assert handler_reg2.handler is f1
        assert handler_reg2.routing_key == 'my.routing.key2'
        assert handler_reg2.queue_arguments == {}

        step = ec.AMQPRetryConsumerStep(None)
        handlers = step.get_handlers(channel=mock.MagicMock())

        assert len(handlers) == len(reg)

        for handler in handlers:
            assert isinstance(handler, ec.AMQPRetryHandler)
            assert len(handler.consumer.queues) == 1
            assert len(handler.consumer.callbacks) == 1
            assert isinstance(handler.consumer.callbacks[0], ec.AMQPRetryHandler)
            key = QueueKey(queue=handler.queue, exchange=handler.exchange)
            assert handler.consumer.callbacks[0].func is reg[key].handler
--------------------------------------------------------------------------------
/event_consumer/handlers.py:
--------------------------------------------------------------------------------
"""
Apparatus for consuming 'vanilla' AMQP messages (i.e. not Celery tasks) making use
of the battle-tested `bin/celery worker` utility rather than writing our own.

NOTE:
We don't access any Celery config in this file. That is because the config is
loaded by the bin/celery worker itself, according to the usual mechanism.
"""
import logging
import traceback

import six
from typing import Any, Callable, Dict, Iterable, List, Optional, Union  # noqa

import amqp  # noqa
import celery.bootsteps as bootsteps
import kombu
import kombu.message
import kombu.common as common

from event_consumer.conf import settings
from event_consumer.errors import InvalidQueueRegistration, NoExchange, PermanentFailure
from event_consumer.types import HandlerRegistration, QueueKey

if settings.USE_DJANGO:
    from django.core.signals import request_finished


_logger = logging.getLogger(__name__)


# Maps routing-keys to handlers
REGISTRY = {}  # type: Dict[QueueKey, HandlerRegistration]

DEFAULT_EXCHANGE = 'default'


def _validate_registration(register_key):  # type: (QueueKey) -> None
    """
    Raises:
        InvalidQueueRegistration
    """
    global REGISTRY
    if register_key in REGISTRY:
        raise InvalidQueueRegistration(
            'Attempted duplicate registrations for messages with the queue name '
            '"{queue}" and exchange "{exchange}"'.format(
                queue=register_key.queue,
                exchange=register_key.exchange,
            )
        )


def message_handler(
    routing_keys,  # type: Union[str, Iterable]
    queue=None,  # type: Optional[str]
    exchange=DEFAULT_EXCHANGE,  # type: str
    queue_arguments=None,  # type: Optional[Dict[str, object]]
):
    # type: (...) -> Callable[[Callable], Any]
    """
    Register a function as a handler for messages on a rabbitmq exchange with
    the given routing-key. Default behaviour is to use `routing_key` as the
    queue name and attach it to the 'default' exchange. If this key is not
    present in `settings.EXCHANGES` with your own config then you will get the
    underlying AMQP default exchange - this has some restrictions (you cannot
    bind custom queue names, only auto-bound same-as-routing-key queues are
    possible).

    Otherwise Queues and Exchanges are automatically created on the broker
    by Kombu and you don't have to worry about it.

    Kwargs:
        routing_keys: The routing key/s of messages to be handled by the
            decorated task.
        queue: The name of the main queue from which messages
            will be consumed. Defaults to `{QUEUE_NAME_PREFIX}{routing_key}`
            if not supplied. Special case is '': this will give you the Kombu
            default queue name without prepending `QUEUE_NAME_PREFIX`.
        exchange: The AMQP exchange config to use. This is a *key name*
            in the `settings.EXCHANGES` dict.
        queue_arguments: Arbitrary arguments to be passed to the *primary* queue
            at creation time.

    Returns:
        Callable: function decorator

    Raises:
        InvalidQueueRegistration

    Usage:
        @message_handler('my.routing.key', 'my.queue', 'my.exchange')
        def process_message(body):
            print(body)  # Whatever

    Note that this is an import side-effect (as is Celery's @task decorator).
    In order for the event handler to be registered, its containing module must
    be imported before starting the AMQPRetryConsumerStep.
    """
    if (
        queue or (queue is None and settings.QUEUE_NAME_PREFIX)
    ) and exchange not in settings.EXCHANGES:
        raise InvalidQueueRegistration(
            "You must use a named exchange from settings.EXCHANGES "
            "if you want to bind a custom queue name."
        )

    if isinstance(routing_keys, six.string_types):
        routing_keys = [routing_keys]
    else:
        if queue is not None:
            raise InvalidQueueRegistration(
                "We need a queue-per-routing-key so you can't specify a "
                "custom queue name when attaching multiple routes. Use "
                "separate handlers for each routing key in this case."
            )

    queue_arguments = queue_arguments or {}

    def decorator(f):  # type: (Callable) -> Callable
        global REGISTRY

        for routing_key in routing_keys:
            queue_name = (
                (settings.QUEUE_NAME_PREFIX + routing_key) if queue is None else queue
            )

            # kombu.Consumer has no concept of routing-key (only queue name), so
            # handler registrations must be unique on queue+exchange (otherwise
            # messages from the queue would be randomly sent to the duplicate handlers)
            register_key = QueueKey(queue=queue_name, exchange=exchange)
            _validate_registration(register_key)

            handler_registration = HandlerRegistration(
                routing_key=routing_key,
                queue_arguments=queue_arguments,
                handler=f,
            )
            REGISTRY[register_key] = handler_registration

            _logger.debug(
                'registered: %s to handler: %s.%s',
                register_key,
                f.__module__,
                f.__name__,
            )

        return f

    return decorator


class AMQPRetryConsumerStep(bootsteps.StartStopStep):
    """
    An integration hook with Celery which is adapted from the built-in class
    `bootsteps.ConsumerStep`. Instead of registering a `kombu.Consumer` on
    startup, we create instances of `AMQPRetryHandler` passing in a channel
    which is used to create all the queues/exchanges/etc. needed to
    implement our try-retry-archive scheme.

    See http://docs.celeryproject.org/en/latest/userguide/extending.html
    """

    requires = ('celery.worker.consumer:Connection',)

    def __init__(self, *args, **kwargs):
        self.handlers = []  # type: List[AMQPRetryHandler]
        self._tasks = kwargs.pop(
            'tasks', REGISTRY
        )  # type: Dict[QueueKey, HandlerRegistration]
        super(AMQPRetryConsumerStep, self).__init__(*args, **kwargs)

    def start(self, c):
        channel = c.connection.channel()
        self.handlers = self.get_handlers(channel)

        for handler in self.handlers:
            handler.declare_queues()
            handler.consumer.consume()
            _logger.debug('AMQPRetryConsumerStep: Started handler: %s', handler)

    def stop(self, c):
        self._close(c, True)

    def shutdown(self, c):
        self._close(c, False)

    def _close(self, c, cancel_consumers=True):
        channels = set()
        for handler in self.handlers:
            if cancel_consumers:
                common.ignore_errors(c.connection, handler.consumer.cancel)
            if handler.consumer.channel:
                channels.add(handler.consumer.channel)
        for channel in channels:
            common.ignore_errors(c.connection, channel.close)

    def get_handlers(self, channel):
        return [
            AMQPRetryHandler(
                channel=channel,
                routing_key=handler_registration.routing_key,
                queue=queue_key.queue,
                exchange=queue_key.exchange,
                queue_arguments=handler_registration.queue_arguments,
                func=handler_registration.handler,
                backoff_func=settings.BACKOFF_FUNC,
            )
            for queue_key, handler_registration in self._tasks.items()
        ]


class AMQPRetryHandler(object):
    """
    Implements Depop's try-retry-archive message queue pattern.

    Briefly - messages are processed and may be retried by placing them on a separate retry
    queue on a dead-letter-exchange. Messages on the DLX are automatically re-queued by Rabbit
    once they expire. The expiry is set on a message-by-message basis to allow exponential
    backoff on retries.
    """

    def __init__(
        self,
        channel,  # type: amqp.channel.Channel
        routing_key,  # type: str
        queue,  # type: str
        exchange,  # type: str
        queue_arguments,  # type: Dict[str, object]
        func,  # type: Callable[[Any], Any]
        backoff_func=None,  # type: Optional[Callable[[int], float]]
    ):
        # type: (...) -> None
        self.channel = channel
        self.routing_key = routing_key
        self.queue = queue  # queue name
        self.exchange = exchange  # `settings.EXCHANGES` config key
        self.func = func
        self.backoff_func = backoff_func or self.backoff

        self.exchanges = {DEFAULT_EXCHANGE: kombu.Exchange(channel=self.channel)}

        for name, exchange_settings in settings.EXCHANGES.items():
            self.exchanges[name] = kombu.Exchange(
                channel=self.channel, **exchange_settings
            )

        try:
            self.worker_queue = kombu.Queue(
                name=self.queue,
                exchange=self.exchanges[exchange],
                routing_key=self.routing_key,
                channel=self.channel,
                queue_arguments=queue_arguments,
            )

            self.retry_queue = kombu.Queue(
                name='{queue}.retry'.format(queue=queue),
                exchange=self.exchanges[DEFAULT_EXCHANGE],
                routing_key='{queue}.retry'.format(queue=queue),
                # N.B. default exchange automatically routes messages to a queue
                # with the same name as the routing key provided.
                queue_arguments={
                    "x-dead-letter-exchange": "",
                    "x-dead-letter-routing-key": self.queue,
                },
                channel=self.channel,
            )

            self.archive_queue = kombu.Queue(
                name='{queue}.archived'.format(queue=queue),
                exchange=self.exchanges[DEFAULT_EXCHANGE],
                routing_key='{queue}.archived'.format(queue=queue),
                queue_arguments=settings.ARCHIVE_QUEUE_ARGS,
                channel=self.channel,
            )
        except KeyError as key_exc:
            raise NoExchange(
                "The exchange {exchange} was not found in settings.EXCHANGES.\n"
                "settings.EXCHANGES = {exchanges}".format(
                    exchange=key_exc, exchanges=settings.EXCHANGES
                )
            )

        self.retry_producer = kombu.Producer(
            channel,
            exchange=self.retry_queue.exchange,
            routing_key=self.retry_queue.routing_key,
            serializer=settings.SERIALIZER,
        )

        self.archive_producer = kombu.Producer(
            channel,
            exchange=self.archive_queue.exchange,
            routing_key=self.archive_queue.routing_key,
            serializer=settings.SERIALIZER,
        )

        self.consumer = kombu.Consumer(
            channel,
            queues=[self.worker_queue],
            callbacks=[self],
            accept=settings.ACCEPT,
        )

        self.consumer.qos(prefetch_count=settings.PREFETCH_COUNT)

    def __repr__(self):
        return (
            "AMQPRetryHandler("
            "routing_key={routing_key}, "
            "queue={queue}, "
            "exchange={exchange}, "
            "func={func.__module__}.{func.__name__}"
            ")".format(
                routing_key=self.routing_key,
                queue=self.queue,
                exchange=self.exchange,
                func=self.func,
            )
        )

    def __call__(self, body, message):
        """
        Handle a vanilla AMQP message, called by the Celery framework.

        Raising an exception in this method will crash the Celery worker. Ensure
        that all Exceptions are caught and messages acknowledged or rejected
        as they are processed.

        Args:
            body (Any): the message content, which has been deserialized by Kombu
            message (kombu.message.Message)

        Returns:
            None
        """
        retry_count = self.retry_count(message)

        try:
            _logger.debug(
                'Received: (key={routing_key}, retry_count={retry_count})'.format(
                    routing_key=self.routing_key,
                    retry_count=retry_count,
                )
            )
            self.func(body)

        except Exception as e:
            if isinstance(e, PermanentFailure):
                self.archive(
                    body,
                    message,
                    "Task '{routing_key}' raised '{cls}, {error}'\n"
                    "{traceback}".format(
                        routing_key=self.routing_key,
                        cls=e.__class__.__name__,
                        error=e,
                        traceback=traceback.format_exc(),
                    ),
                )
            elif retry_count >= settings.MAX_RETRIES:
                self.archive(
                    body,
                    message,
                    "Task '{routing_key}' ran out of retries ({retries}) on exception "
                    "'{cls}, {error}'\n"
                    "{traceback}".format(
                        routing_key=self.routing_key,
                        retries=retry_count,
                        cls=e.__class__.__name__,
                        error=e,
                        traceback=traceback.format_exc(),
                    ),
                )
            else:
                self.retry(
                    body,
                    message,
                    "Task '{routing_key}' raised the exception '{cls}, {error}', but there are "
                    "{retries} retries left\n"
                    "{traceback}".format(
                        routing_key=self.routing_key,
                        retries=settings.MAX_RETRIES - retry_count,
                        cls=e.__class__.__name__,
                        error=e,
                        traceback=traceback.format_exc(),
                    ),
                )
        else:
            message.ack()
            _logger.debug(
                "Task '{routing_key}' processed and ack() sent".format(
                    routing_key=self.routing_key
                )
            )

        finally:
            if settings.USE_DJANGO:
                # avoid various problems with db connections, due to long-lived
                # worker not automatically participating in Django request lifecycle
                request_finished.send(sender="AMQPRetryHandler")

            if not message.acknowledged:
                message.requeue()
                _logger.critical(
                    "Messages for task '{routing_key}' are not sending an ack() or a reject(). "
                    "This needs attention. Assuming some kind of error and requeueing the "
                    "message.".format(routing_key=self.routing_key)
                )

    def retry(self, body, message, reason=''):
        """
        Put the message onto the retry queue
        """
        _logger.warning(reason)
        try:
            retry_count = self.retry_count(message)
            headers = message.headers.copy()
            headers.update({settings.RETRY_HEADER: retry_count + 1})
            self.retry_producer.publish(
                body,
                headers=headers,
                retry=True,
                declare=[self.retry_queue],
                expiration=self.backoff_func(retry_count),
            )
        except Exception as e:
            message.requeue()
            _logger.error(
                "Retry failure: retry-reason='{reason}' "
                "exception='{cls}, {error}'\n"
                "{traceback}".format(
                    reason=reason,
                    cls=e.__class__.__name__,
                    error=e,
                    traceback=traceback.format_exc(),
                )
            )

        else:
            message.ack()
            _logger.debug("Retry: {reason}".format(reason=reason))

    def archive(self, body, message, reason=''):
        """
        Put the message onto the archive queue
        """
        _logger.warning(reason)
        try:
            self.archive_producer.publish(
                body,
                headers=message.headers,
                retry=True,
                declare=[self.archive_queue],
            )

        except Exception as e:
            message.requeue()
            _logger.error(
                "Archive failure: retry-reason='{reason}' "
                "exception='{cls}, {error}'\n"
                "{traceback}".format(
                    reason=reason,
                    cls=e.__class__.__name__,
                    error=e,
                    traceback=traceback.format_exc(),
                )
            )
        else:
            message.ack()
            _logger.debug("Archive: {reason}".format(reason=reason))

    def declare_queues(self):
        queues = [self.worker_queue, self.retry_queue, self.archive_queue]
        for queue in queues:
            queue.declare()

    @classmethod
    def retry_count(cls, message):
        return message.headers.get(settings.RETRY_HEADER, 0)

    @staticmethod
    def backoff(retry_count):
        # type: (int) -> float
        """
        Given the number of attempted retries at delivering a message, return
        an increasing TTL for the message for the next retry (in seconds).
        """
        # First retry after 200 ms, then 1s, then 1m, then every 30m
        retry_delay = [0.2, 1, 60, 1800]
        try:
            return retry_delay[retry_count]
        except IndexError:
            return retry_delay[-1]
--------------------------------------------------------------------------------
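
For orientation, the pieces above combine into a worker roughly as follows. This is a minimal sketch rather than repository source: the module name, routing key and broker URL are invented for illustration, and the only interfaces assumed are `message_handler` and `AMQPRetryConsumerStep` from handlers.py plus Celery's documented bootstep hook (`app.steps['consumer'].add`).

# myapp/worker.py (hypothetical wiring example, not part of this repo)
from celery import Celery

from event_consumer import message_handler
from event_consumer.handlers import AMQPRetryConsumerStep

app = Celery('myapp', broker='amqp://guest:guest@localhost:5672//')


# Import side-effect: adds an entry to REGISTRY keyed on (queue, exchange).
@message_handler('user.signup')
def handle_signup(body):
    print(body)


# When the worker boots, AMQPRetryConsumerStep.start() declares the
# worker/retry/archive queues for each registration and starts consuming.
app.steps['consumer'].add(AMQPRetryConsumerStep)

A worker started with something like `celery -A myapp.worker worker` would then process `user.signup` messages, retrying failures via the `{queue}.retry` dead-letter queue and archiving them to `{queue}.archived` once `settings.MAX_RETRIES` is exhausted.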
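Note also that `get_handlers` passes `settings.BACKOFF_FUNC` to each handler as `backoff_func`, with the static `backoff` table above as the fallback when that setting is None. Any callable mapping a retry count to a TTL in seconds will do; the sketch below is a hypothetical capped-exponential replacement (how `BACKOFF_FUNC` is actually configured lives in event_consumer/conf/settings.py, which is outside this section).

# Hypothetical custom backoff: 1s, 2s, 4s, 8s, ... doubling per retry,
# capped at 30 minutes. Pass as `backoff_func` to AMQPRetryHandler, or
# configure as BACKOFF_FUNC so get_handlers picks it up.
def exponential_backoff(retry_count):
    # type: (int) -> float
    return min(2.0 ** retry_count, 1800.0)

assert [exponential_backoff(n) for n in range(4)] == [1.0, 2.0, 4.0, 8.0]

Because the TTL is supplied per message (the `expiration` argument in `retry()`), the delay can grow with each attempt, which is what the class docstring means by per-message expiry allowing exponential backoff.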