├── VERSION ├── eb_sqs ├── __init__.py ├── auto_tasks │ ├── __init__.py │ ├── exceptions.py │ └── service.py ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ ├── process_queue.py │ │ └── healthcheck.py ├── tests │ ├── auto_tasks │ │ ├── __init__.py │ │ └── tests_auto_tasks.py │ ├── __init__.py │ ├── aws │ │ ├── __init__.py │ │ └── tests_aws_queue_client.py │ ├── worker │ │ ├── __init__.py │ │ ├── tests_worker_task.py │ │ └── tests_worker.py │ └── tests_decorators.py ├── aws │ ├── __init__.py │ └── sqs_queue_client.py ├── worker │ ├── __init__.py │ ├── commons.py │ ├── queue_client.py │ ├── sqs_worker_factory.py │ ├── worker_factory.py │ ├── worker_exceptions.py │ ├── worker_task.py │ ├── worker.py │ └── service.py ├── apps.py ├── test_settings.py ├── settings.py └── decorators.py ├── setup.cfg ├── requirements-pypi.txt ├── development.txt ├── .gitignore ├── .circleci └── config.yml ├── setup.py ├── LICENSE ├── publish-pypi.sh ├── update-version.sh └── README.md /VERSION: -------------------------------------------------------------------------------- 1 | 1.44 2 | -------------------------------------------------------------------------------- /eb_sqs/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /eb_sqs/auto_tasks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /eb_sqs/management/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /eb_sqs/management/commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/eb_sqs/tests/auto_tasks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | -------------------------------------------------------------------------------- /requirements-pypi.txt: -------------------------------------------------------------------------------- 1 | setuptools>=44.0.0 2 | wheel>=0.33.6 3 | twine>=3.1.1 -------------------------------------------------------------------------------- /eb_sqs/aws/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | -------------------------------------------------------------------------------- /eb_sqs/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | -------------------------------------------------------------------------------- /eb_sqs/worker/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | -------------------------------------------------------------------------------- /eb_sqs/tests/aws/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | -------------------------------------------------------------------------------- /eb_sqs/tests/worker/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | -------------------------------------------------------------------------------- /development.txt: 
@contextmanager
def django_db_management():
    """Context manager giving a unit of work clean Django DB connection state.

    Resets the per-connection query log and discards stale or broken
    connections before yielding, then closes old connections again on exit so
    a long-running worker loop does not accumulate dead database connections.
    """
    reset_queries()  # drop the debug query log so it cannot grow unbounded
    close_old_connections()  # recycle connections past CONN_MAX_AGE or in error state
    try:
        yield
    finally:
        # always re-check connections, even if the wrapped work raised
        close_old_connections()
class QueueClientException(Exception):
    """Base error type for queue client failures."""
    pass


class QueueDoesNotExistException(QueueClientException):
    """Raised when a lookup targets a queue the backend does not have."""

    def __init__(self, queue_name: str):
        super().__init__()
        # keep the failing name so callers can report or auto-create the queue
        self.queue_name = queue_name


class QueueClient(metaclass=ABCMeta):
    """Abstract interface for pushing messages onto a named work queue."""

    @abstractmethod
    def add_message(self, queue_name: str, msg: str, delay: int):
        """Enqueue *msg* on *queue_name*, delivered after *delay* seconds."""
        pass
class Command(BaseCommand):
    """Management command that runs the SQS worker loop for the given queues."""

    help = 'Command to process tasks from one or more SQS queues'

    def add_arguments(self, parser):
        parser.add_argument('--queues', '-q',
                            dest='queue_names',
                            help='Name of queues to process, separated by commas')

    def handle(self, *args, **options):
        """Validate the --queues option and hand the names to the worker service.

        Raises CommandError when no queue names were supplied.
        """
        if not options['queue_names']:
            raise CommandError('Queue names (--queues) not specified')

        # strip() (not rstrip()) so input written as "q1, q2" does not produce
        # a queue named " q2"; empty entries from stray commas are dropped.
        queue_names = [name.strip() for name in options['queue_names'].split(',') if name.strip()]

        WorkerService().process_queues(queue_names)
class WorkerException(Exception):
    """Root of all worker-related errors."""
    pass


class InvalidMessageFormatException(WorkerException):
    """The received queue message could not be parsed into a task."""

    def __init__(self, msg: str, caught: Exception):
        super().__init__()
        self.msg = msg  # raw message body that failed to parse
        self.caught = caught  # the underlying parsing error


class ExecutionFailedException(WorkerException):
    """Running the task function raised an exception."""

    def __init__(self, task_name: str, caught: Exception):
        super().__init__()
        self.task_name = task_name  # fully qualified task that failed
        self.caught = caught  # the exception raised by the task


class MaxRetriesReachedException(WorkerException):
    """The task exhausted its retry budget."""

    def __init__(self, retries: int):
        super().__init__()
        self.retries = retries  # number of retries already performed


class QueueException(WorkerException):
    """Root of queue-related worker errors."""
    pass


class InvalidQueueException(QueueException):
    """The task referenced a queue this worker does not serve."""

    def __init__(self, queue_name: str):
        super().__init__()
        self.queue_name = queue_name
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Barracuda Networks Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
class Command(BaseCommand):
    """Health check for the SQS worker process.

    The worker periodically writes a timestamp into HEALTHCHECK_FILE_NAME;
    this command exits with status 1 when that timestamp is missing,
    unparsable, or older than HEALTHCHECK_UNHEALTHY_PERIOD_S seconds.
    """

    help = 'Checks the SQS worker is healthy, and if not returns a failure code'

    def add_arguments(self, parser):
        pass

    def handle(self, *args, **options):
        try:
            with open(settings.HEALTHCHECK_FILE_NAME, 'r') as file:
                # only the first line holds the timestamp; readline() avoids
                # loading the whole file and does not IndexError on empty files
                last_healthcheck_date_str = file.readline()

            unhealthy_after = timedelta(seconds=settings.HEALTHCHECK_UNHEALTHY_PERIOD_S)
            if parse_datetime(last_healthcheck_date_str) < timezone.now() - unhealthy_after:
                self._return_failure()
        except Exception:
            # any problem (missing file, empty file, bad timestamp) counts as
            # unhealthy; log the cause instead of swallowing it silently
            logger.warning('[django-eb-sqs] Health check could not read a valid timestamp', exc_info=True)
            self._return_failure()

    @staticmethod
    def _return_failure():
        # exit code 1 is the unhealthy signal consumed by the supervisor
        logger.warning('[django-eb-sqs] Health check failed')
        sys.exit(1)
13 | Usage: $0 -p 14 | -p : Github Tag value, semantic realease format eg: v1.36 or v2.0.7 15 | -h : help 16 | " 17 | exit 18 | } 19 | 20 | function publish() 21 | { 22 | 23 | log "Installing required dependencies" 24 | pip install -r requirements-pypi.txt 25 | 26 | # checkout specific tag version 27 | git checkout tags/"$TAG_PARAM" 28 | 29 | # creating the distribution package 30 | rm -rf dist/ 31 | python setup.py sdist 32 | python setup.py bdist_wheel 33 | 34 | log "Publishing package version/tag: $TAG_PARAM" 35 | twine upload dist/* # add --repository-url https://test.pypi.org/legacy/ to push to TestPyPI 36 | 37 | exit 38 | } 39 | 40 | # Parse and handle command line options 41 | while getopts ":p:h" OPTION; do 42 | case $OPTION in 43 | p) 44 | TAG_PARAM=$OPTARG 45 | publish 46 | ;; 47 | *) 48 | usage 49 | ;; 50 | esac 51 | done 52 | 53 | # if no args specified 54 | if [ "$#" -ne 1 ] 55 | then 56 | usage 57 | fi 58 | -------------------------------------------------------------------------------- /update-version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | VERSION_PARAM=${1:-AUTO} 6 | 7 | function log() 8 | { 9 | echo "$(date): $1" 10 | } 11 | 12 | function usage() 13 | { 14 | echo "Helper to update the package version and commit to GitHub. 15 | Usage: $0 16 | : Version is usually a semantic release eg 1.20, 1.1.7. 17 | VERSION value is optional, if not passed then an auto version update occurs. 18 | -h : help 19 | " 20 | exit 21 | } 22 | 23 | function update() 24 | { 25 | # update codebase 26 | git checkout master 27 | git pull 28 | 29 | if [ "$VERSION_PARAM" = "AUTO" ] 30 | then 31 | OLD_VER=$(cat VERSION) 32 | NEW_VER=$(echo "$OLD_VER" + .01 | bc) 33 | else 34 | NEW_VER=$VERSION_PARAM 35 | fi 36 | 37 | # bump the version 38 | echo "$NEW_VER" > VERSION 39 | log "Version bumped to $NEW_VER" 40 | 41 | # push VERSION file and new TAG to git 42 | log "Pushing to GitHub..." 
43 | git add VERSION 44 | git commit -a -m "Bump the version to $NEW_VER" 45 | git push 46 | 47 | # adding tag 48 | log "Adding Tag..." 49 | TAG="v$NEW_VER" 50 | log "New tag : $TAG" 51 | git tag -a "$TAG" -m "Bumped the version to $NEW_VER" 52 | git push origin "$TAG" 53 | 54 | exit 55 | } 56 | 57 | # Parse and handle command line options 58 | while getopts ":h" OPTION; do 59 | case $OPTION in 60 | h) 61 | usage 62 | ;; 63 | *) 64 | usage 65 | ;; 66 | esac 67 | done 68 | 69 | update 70 | -------------------------------------------------------------------------------- /eb_sqs/test_settings.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "secret-test-key" 2 | 3 | INSTALLED_APPS = ( 4 | 'eb_sqs', 5 | ) 6 | 7 | DATABASES = { 8 | 'default': { 9 | 'ENGINE': 'django.db.backends.sqlite3', 10 | 'NAME': ':memory:', 11 | } 12 | } 13 | 14 | TEMPLATES = [ 15 | { 16 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 17 | 'DIRS': [ 18 | # insert your TEMPLATE_DIRS here 19 | ], 20 | 'APP_DIRS': True, 21 | 'OPTIONS': { 22 | 'context_processors': [ 23 | # Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this 24 | # list if you haven't customized them: 25 | 'django.contrib.auth.context_processors.auth', 26 | 'django.template.context_processors.debug', 27 | 'django.template.context_processors.i18n', 28 | 'django.template.context_processors.media', 29 | 'django.template.context_processors.static', 30 | 'django.template.context_processors.tz', 31 | 'django.contrib.messages.context_processors.messages', 32 | ], 33 | }, 34 | }, 35 | ] 36 | 37 | LOGGING = { 38 | 'version': 1, 39 | 'disable_existing_loggers': False, 40 | 'handlers': { 41 | 'console': { 42 | 'level': 'DEBUG', 43 | 'class': 'logging.StreamHandler' 44 | }, 45 | }, 46 | 'loggers': { 47 | 'eb_sqs': { 48 | 'handlers': ['console'], 49 | 'level': 'CRITICAL' 50 | }, 51 | } 52 | } 53 | 
-------------------------------------------------------------------------------- /eb_sqs/tests/worker/tests_worker_task.py: -------------------------------------------------------------------------------- 1 | import json 2 | from unittest import TestCase 3 | 4 | from eb_sqs.worker.worker_task import WorkerTask 5 | 6 | 7 | class TestObject(object): 8 | def __init__(self): 9 | super(TestObject, self).__init__() 10 | self.message = 'Test' 11 | 12 | 13 | def dummy_function(): 14 | pass 15 | 16 | 17 | class WorkerTaskTest(TestCase): 18 | def setUp(self): 19 | self.dummy_msg = '{"queue": "default", "retryId": "retry-uuid", "retry": 0, "func": "eb_sqs.tests.worker.tests_worker_task.dummy_function", "kwargs": {}, "maxRetries": 5, "args": [], "pickle": false, "id": "id-1", "groupId": "group-5"}' 20 | 21 | def test_serialize_worker_task(self): 22 | worker_task = WorkerTask('id-1', 'group-5', 'default', dummy_function, [], {}, 5, 0, 'retry-uuid', False) 23 | msg = worker_task.serialize() 24 | 25 | self.assertDictEqual(json.loads(msg), json.loads(self.dummy_msg)) 26 | 27 | def test_deserialize_worker_task(self): 28 | worker_task = WorkerTask.deserialize(self.dummy_msg) 29 | 30 | self.assertEqual(worker_task.id, 'id-1') 31 | self.assertEqual(worker_task.group_id, 'group-5') 32 | self.assertEqual(worker_task.queue, 'default') 33 | self.assertEqual(worker_task.func, dummy_function) 34 | self.assertEqual(worker_task.args, []) 35 | self.assertEqual(worker_task.kwargs, {}) 36 | self.assertEqual(worker_task.max_retries, 5) 37 | self.assertEqual(worker_task.retry, 0) 38 | self.assertEqual(worker_task.retry_id, 'retry-uuid') 39 | 40 | def test_serialize_pickle(self): 41 | worker_task1 = WorkerTask('id-1', None, 'default', dummy_function, [], {'object': TestObject()}, 5, 0, None, True) 42 | msg = worker_task1.serialize() 43 | 44 | worker_task2 = WorkerTask.deserialize(msg) 45 | self.assertEqual(worker_task2.args, worker_task1.args) 46 | 
from django.conf import settings

# All options are read from the Django settings module with the defaults
# below. Except where noted, the setting names carry the EB_SQS_ prefix.

AWS_REGION = getattr(settings, 'EB_AWS_REGION', 'us-east-1')  # type: str

# Polling behaviour of the worker loop.
MAX_NUMBER_OF_MESSAGES = getattr(settings, 'EB_SQS_MAX_NUMBER_OF_MESSAGES', 10)  # type: int
WAIT_TIME_S = getattr(settings, 'EB_SQS_WAIT_TIME_S', 2)  # type: int
# NOTE(review): this looks up 'NO_QUEUES_WAIT_TIME_S' WITHOUT the EB_SQS_
# prefix, unlike every other option — presumably kept for backwards
# compatibility; confirm before renaming.
NO_QUEUES_WAIT_TIME_S = getattr(settings, 'NO_QUEUES_WAIT_TIME_S', 5)  # type: int

# Queue naming and auto-creation.
AUTO_ADD_QUEUE = getattr(settings, 'EB_SQS_AUTO_ADD_QUEUE', False)  # type: bool
QUEUE_PREFIX = getattr(settings, 'EB_SQS_QUEUE_PREFIX', '')  # type: str
DEFAULT_QUEUE = getattr(settings, 'EB_SQS_DEFAULT_QUEUE', 'eb-sqs-default')  # type: str

EXECUTE_INLINE = getattr(settings, 'EB_SQS_EXECUTE_INLINE', False)  # type: bool
FORCE_SERIALIZATION = getattr(settings, 'EB_SQS_FORCE_SERIALIZATION', False)  # type: bool

# Task defaults applied when the @task decorator/call gives no override.
DEFAULT_DELAY = getattr(settings, 'EB_SQS_DEFAULT_DELAY', 0)  # type: int
DEFAULT_MAX_RETRIES = getattr(settings, 'EB_SQS_DEFAULT_MAX_RETRIES', 0)  # type: int
DEFAULT_COUNT_RETRIES = getattr(settings, 'EB_SQS_DEFAULT_COUNT_RETRIES', True)  # type: bool

USE_PICKLE = getattr(settings, 'EB_SQS_USE_PICKLE', False)  # type: bool

WORKER_FACTORY = getattr(settings, 'EB_SQS_WORKER_FACTORY', None)  # type: WorkerFactory

DEAD_LETTER_MODE = getattr(settings, 'EB_SQS_DEAD_LETTER_MODE', False)  # type: bool

AWS_MAX_RETRIES = getattr(settings, 'EB_SQS_AWS_MAX_RETRIES', 30)  # type: int

REFRESH_PREFIX_QUEUES_S = getattr(settings, 'EB_SQS_REFRESH_PREFIX_QUEUES_S', 10)  # type: int

# Queue attributes used when AUTO_ADD_QUEUE creates a queue; SQS expects these
# attribute values as strings (seconds).
QUEUE_MESSAGE_RETENTION = getattr(settings, 'EB_SQS_QUEUE_MESSAGE_RETENTION', '1209600')  # type: str
QUEUE_VISIBILITY_TIMEOUT = getattr(settings, 'EB_SQS_QUEUE_VISIBILITY_TIMEOUT', '300')  # type: str

# Worker health check file and thresholds (see the healthcheck command).
MIN_HEALTHCHECK_WRITE_PERIOD_S = getattr(settings, 'EB_SQS_MIN_HEALTHCHECK_WRITE_PERIOD_S', 10)  # type: int
HEALTHCHECK_UNHEALTHY_PERIOD_S = getattr(settings, 'EB_SQS_HEALTHCHECK_UNHEALTHY_PERIOD_S', int(QUEUE_VISIBILITY_TIMEOUT))  # type: int
HEALTHCHECK_FILE_NAME = getattr(settings, 'EB_SQS_HEALTHCHECK_FILE_NAME', 'healthcheck.txt')  # type: str
from typing import Any

from eb_sqs import settings
from eb_sqs.worker.worker_factory import WorkerFactory
from eb_sqs.worker.worker_task import WorkerTask


def _get_kwarg_val(kwargs: dict, key: str, default: Any) -> Any:
    """Pop *key* out of *kwargs* (so it is not forwarded to the task) or return *default*."""
    return kwargs.pop(key, default) if kwargs else default


def func_delay_decorator(func: Any, queue_name: str, max_retries_count: int, use_pickle: bool) -> Any:
    """Build the ``.delay(...)`` wrapper that enqueues *func* instead of calling it.

    Per-call overrides ('queue_name', 'max_retries', 'use_pickle',
    'execute_inline', 'delay', 'group_id') are popped from kwargs before the
    remaining kwargs are forwarded to the task itself.
    """
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        # decorator-time values win over settings; per-call kwargs win over both
        queue = _get_kwarg_val(kwargs, 'queue_name', queue_name if queue_name else settings.DEFAULT_QUEUE)
        max_retries = _get_kwarg_val(kwargs, 'max_retries', max_retries_count if max_retries_count else settings.DEFAULT_MAX_RETRIES)
        pickle = _get_kwarg_val(kwargs, 'use_pickle', use_pickle if use_pickle else settings.USE_PICKLE)

        execute_inline = _get_kwarg_val(kwargs, 'execute_inline', False) or settings.EXECUTE_INLINE
        delay = _get_kwarg_val(kwargs, 'delay', settings.DEFAULT_DELAY)
        group_id = _get_kwarg_val(kwargs, 'group_id', None)

        worker = WorkerFactory.default().create()
        return worker.delay(group_id, queue, func, args, kwargs, max_retries, pickle, delay, execute_inline)

    return wrapper


def func_retry_decorator(worker_task: WorkerTask) -> Any:
    """Build the wrapper that re-enqueues an already-running *worker_task*."""
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        execute_inline = _get_kwarg_val(kwargs, 'execute_inline', False) or settings.EXECUTE_INLINE
        delay = _get_kwarg_val(kwargs, 'delay', settings.DEFAULT_DELAY)
        count_retries = _get_kwarg_val(kwargs, 'count_retries', settings.DEFAULT_COUNT_RETRIES)

        worker = WorkerFactory.default().create()
        return worker.retry(worker_task, delay, execute_inline, count_retries)
    return wrapper


class task(object):
    """Decorator turning a function into an SQS task with a ``.delay`` method."""

    def __init__(self, queue_name: str = None, max_retries: int = None, use_pickle: bool = None):
        # None means "fall back to the project-level default at call time"
        self.queue_name = queue_name
        self.max_retries = max_retries
        self.use_pickle = use_pickle

    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        """Attach task attributes to the decorated function and return it unchanged."""
        func = args[0]
        func.retry_num = 0  # current retry counter, starts at zero
        func.delay = func_delay_decorator(func, self.queue_name, self.max_retries, self.use_pickle)
        return func
self._auto_task_service.register_task(self.task_other_method) 22 | 23 | def task_method(self, *args, **kwargs): 24 | self.TEST_MOCK.task_method(*args, **kwargs) 25 | 26 | def task_retry_method(self, *args, **kwargs): 27 | self.TEST_MOCK.task_retry_method(*args, **kwargs) 28 | 29 | def max_retry_fun(): 30 | self.TEST_MOCK.task_max_retry_method(*args, **kwargs) 31 | 32 | raise RetryableTaskException(Exception('Test'), max_retries_func=max_retry_fun) 33 | 34 | def non_task_method(self): 35 | self.TEST_MOCK.non_task_method() 36 | 37 | def task_recursive_method(self, tries=2): 38 | if tries > 0: 39 | self.task_recursive_method(tries=tries - 1) 40 | else: 41 | self.task_other_method() 42 | 43 | def task_other_method(self): 44 | self.TEST_MOCK.task_other_method() 45 | 46 | 47 | class AutoTasksTest(TestCase): 48 | def setUp(self): 49 | self._test_service = TestService() 50 | 51 | self._args = [5, '6'] 52 | self._kwargs = {'p1': 'bla', 'p2': 130} 53 | 54 | settings.EXECUTE_INLINE = True 55 | 56 | TestService.TEST_MOCK = Mock() 57 | 58 | def test_task_method(self): 59 | self._test_service.task_method(*self._args, **self._kwargs) 60 | 61 | TestService.TEST_MOCK.task_method.assert_called_once_with(*self._args, **self._kwargs) 62 | 63 | def test_task_retry_method(self): 64 | self._test_service.task_retry_method(*self._args, **self._kwargs) 65 | 66 | TestService.TEST_MOCK.task_retry_method.assert_has_calls([call(*self._args, **self._kwargs)] * TestService.MAX_RETRY_NUM) 67 | 68 | TestService.TEST_MOCK.task_max_retry_method.assert_called_once_with(*self._args, **self._kwargs) 69 | 70 | def test_non_task_method(self): 71 | _auto_task_wrapper.delay( 72 | self._test_service.__class__.__module__, 73 | self._test_service.__class__.__name__, 74 | TestService.non_task_method.__name__, 75 | execute_inline=True 76 | ) 77 | 78 | TestService.TEST_MOCK.non_task_method.assert_not_called() 79 | 80 | def test_task_recursive_method(self): 81 | self._test_service.task_recursive_method() 82 | 83 | 
class SqsQueueClient(QueueClient):
    """QueueClient implementation that sends messages to AWS SQS via boto3."""

    def __init__(self):
        # one boto3 SQS resource per client; transient-error retries are
        # delegated to botocore via the retries config
        self.sqs = boto3.resource('sqs',
                                  region_name=settings.AWS_REGION,
                                  config=Config(retries={'max_attempts': settings.AWS_MAX_RETRIES})
                                  )
        # queue name -> boto3 Queue, so repeated sends skip the lookup round trip
        self.queue_cache = {}

    def _get_queue(self, queue_name: str, use_cache: bool = True) -> Any:
        """Resolve *queue_name* (with QUEUE_PREFIX applied) to a boto3 queue object."""
        full_queue_name = '{}{}'.format(settings.QUEUE_PREFIX, queue_name)

        queue = self._get_sqs_queue(full_queue_name, use_cache)
        if not queue:
            # queue not found: create it (or raise) depending on settings
            queue = self._add_sqs_queue(full_queue_name)

        return queue

    def _get_sqs_queue(self, queue_name: str, use_cache: bool) -> Any:
        """Fetch the queue from the cache or from SQS; return None if it does not exist."""
        if use_cache and self.queue_cache.get(queue_name):
            return self.queue_cache[queue_name]

        try:
            queue = self.sqs.get_queue_by_name(QueueName=queue_name)
            self.queue_cache[queue_name] = queue
            return queue
        except ClientError as ex:
            error_code = ex.response.get('Error', {}).get('Code', None)
            if error_code == 'AWS.SimpleQueueService.NonExistentQueue':
                # a missing queue is reported to the caller as None, not an error
                return None
            else:
                raise ex

    def _add_sqs_queue(self, queue_name: str) -> Any:
        """Create the queue when AUTO_ADD_QUEUE allows it; otherwise raise."""
        if settings.AUTO_ADD_QUEUE:
            queue = self.sqs.create_queue(
                QueueName=queue_name,
                Attributes={
                    'MessageRetentionPeriod': settings.QUEUE_MESSAGE_RETENTION,
                    'VisibilityTimeout': settings.QUEUE_VISIBILITY_TIMEOUT
                }
            )
            self.queue_cache[queue_name] = queue
            return queue
        else:
            raise QueueDoesNotExistException(queue_name)

    def add_message(self, queue_name: str, msg: str, delay: int):
        """Send *msg* to *queue_name* with *delay* seconds of delivery delay.

        Raises QueueDoesNotExistException for unknown queues (when auto-add is
        disabled) and wraps any other failure in QueueClientException.
        """
        try:
            queue = self._get_queue(queue_name)
            try:
                queue.send_message(
                    MessageBody=msg,
                    DelaySeconds=delay
                )
            except ClientError as ex:
                if ex.response.get('Error', {}).get('Code', None) == 'AWS.SimpleQueueService.NonExistentQueue':
                    # the cached queue may have been deleted remotely:
                    # bypass the cache, re-resolve, and retry the send once
                    queue = self._get_queue(queue_name, use_cache=False)
                    queue.send_message(
                        MessageBody=msg,
                        DelaySeconds=delay
                    )
                else:
                    raise ex
        except QueueDoesNotExistException:
            # propagate unchanged so callers can distinguish a missing queue
            raise
        except Exception as ex:
            raise QueueClientException(ex)
    @mock_sqs()
    def test_add_message_wrong_queue(self):
        """add_message must raise for a queue name that does not resolve."""
        sqs = boto3.resource('sqs',
                             region_name=settings.AWS_REGION)
        # Only 'default' exists; with the 'eb-sqs-' prefix configured in setUp
        # the client will look up 'eb-sqs-invalid', which is absent.
        sqs.create_queue(QueueName='default')
        queue_client = SqsQueueClient()

        with self.assertRaises(QueueDoesNotExistException):
            queue_client.add_message('invalid', 'msg', 0)

    @mock_sqs()
    def test_auto_add_queue(self):
        """With AUTO_ADD_QUEUE the client creates missing queues on demand and
        recovers when a cached queue disappears between sends."""
        settings.AUTO_ADD_QUEUE = True

        queue_name = 'test-queue'

        sqs = boto3.resource('sqs',
                             region_name=settings.AWS_REGION)

        queue_client = SqsQueueClient()

        # First send auto-creates the (prefixed) queue.
        queue_client.add_message(queue_name, 'msg', 0)

        full_queue_name = settings.QUEUE_PREFIX + queue_name

        queue = sqs.get_queue_by_name(QueueName=full_queue_name)

        self.assertEqual(queue.attributes["ApproximateNumberOfMessages"], '1')

        # Simulate the queue being deleted while still present in the cache.
        queue.delete()

        # moto throws exception inconsistent with boto, thus the patching
        with patch.object(queue_client.queue_cache[full_queue_name], 'send_message') as send_message_fn:
            send_message_fn.side_effect = ClientError({'Error': {'Code': 'AWS.SimpleQueueService.NonExistentQueue'}}, None)

            # The client should bypass the stale cache entry and re-create/send.
            queue_client.add_message(queue_name, 'msg', 0)

            queue = sqs.get_queue_by_name(QueueName=full_queue_name)

            self.assertEqual(queue.attributes["ApproximateNumberOfMessages"], '1')

        # Restore the global setting so other tests are unaffected.
        settings.AUTO_ADD_QUEUE = False
class TestException(Exception):
    # Sentinel exception raised by fixture tasks to simulate task failure.
    pass


# Fixture tasks for the Worker tests below. Each is wrapped by the eb_sqs
# @task decorator, which injects `retry_num` and `retry()` at execution time.

@task()
def dummy_task(msg):
    # Simplest possible task: echoes its argument back.
    return msg


@task(max_retries=100)
def retries_task(num_of_retries):
    # Keeps retrying inline until the requested retry count is reached.
    if retries_task.retry_num < num_of_retries:
        retries_task.retry(execute_inline=True)


@task(max_retries=5)
def max_retries_task():
    # Retries unconditionally, so it must eventually hit the max-retries limit.
    max_retries_task.retry(execute_inline=True)


@task(max_retries=100)
def repeating_group_task(num_of_retries):
    # Group variant of retries_task: retries inline until the target count.
    if repeating_group_task.retry_num < num_of_retries:
        repeating_group_task.retry(execute_inline=True)


@task()
def exception_group_task():
    # Fails immediately to exercise error handling for grouped tasks.
    raise TestException()


@task(max_retries=100)
def exception_repeating_group_task(num_of_retries):
    # Retries inline, then fails once the requested retry count is reached.
    if exception_repeating_group_task.retry_num == num_of_retries:
        raise TestException()
    else:
        exception_repeating_group_task.retry(execute_inline=True)


@task(max_retries=5)
def max_retries_group_task():
    # Group variant that always retries and therefore exhausts max_retries.
    max_retries_group_task.retry(execute_inline=True)


# Module-level mock shared by group-callback assertions.
global_group_mock = Mock()
class WorkerTask(object):
    """A single queued unit of work: a callable plus its arguments and retry state.

    Tasks round-trip through serialize()/deserialize() as JSON messages. When
    ``use_pickle`` is set, args/kwargs are pickled and base64-encoded so
    arbitrary Python objects survive the trip; otherwise they must be
    JSON-serializable.
    """

    def __init__(self, id: str, group_id: str, queue: str, func: Any, args: tuple, kwargs: dict, max_retries: int, retry: int, retry_id: str, use_pickle: bool):
        super(WorkerTask, self).__init__()
        self.id = id  # unique task id
        self.group_id = group_id  # optional logical grouping of related tasks
        self.queue = queue  # target queue name (without the global prefix)
        self.func = func  # the callable to execute
        self.args = args
        self.kwargs = kwargs
        self.max_retries = max_retries
        self.retry = retry  # number of (counted) retries so far
        self.retry_id = retry_id  # id of the current retry attempt, if any
        self.use_pickle = use_pickle

        # Fully qualified name used to re-import the callable on the consumer.
        self.abs_func_name = '{}.{}'.format(self.func.__module__, self.func.__name__)

    def execute(self) -> Any:
        """Run the task's callable, exposing retry helpers on the function object."""
        # Imported lazily to avoid a circular import with eb_sqs.decorators.
        from eb_sqs.decorators import func_retry_decorator
        self.func.retry_num = self.retry
        self.func.retry = func_retry_decorator(worker_task=self)
        return self.func(*self.args, **self.kwargs)

    def serialize(self) -> str:
        """Return the JSON message representation of this task."""
        args = WorkerTask._pickle_args(self.args) if self.use_pickle else self.args
        kwargs = WorkerTask._pickle_args(self.kwargs) if self.use_pickle else self.kwargs

        task = {
            'id': self.id,
            'groupId': self.group_id,
            'queue': self.queue,
            'func': self.abs_func_name,
            'args': args,
            'kwargs': kwargs,
            'maxRetries': self.max_retries,
            'retry': self.retry,
            'retryId': self.retry_id,
            'pickle': self.use_pickle,
        }

        return json.dumps(task)

    def copy(self, use_serialization: bool):
        """Clone this task; a serializing copy exercises the full wire format."""
        if use_serialization:
            return WorkerTask.deserialize(self.serialize())
        else:
            return WorkerTask(
                self.id,
                self.group_id,
                self.queue,
                self.func,
                self.args,
                self.kwargs,
                self.max_retries,
                self.retry,
                self.retry_id,
                self.use_pickle,
            )

    @staticmethod
    def _pickle_args(args: Any) -> str:
        # base64 keeps the pickled payload text-safe for JSON/SQS transport.
        return base64.b64encode(pickle.dumps(args, pickle.HIGHEST_PROTOCOL)).decode('utf-8')

    @staticmethod
    def deserialize(msg: str):
        """Rebuild a WorkerTask from its JSON message.

        Propagates whatever json/importlib/pickle raise on malformed input;
        the worker wraps those in InvalidMessageFormatException.
        """
        task = json.loads(msg)

        id = task.get('id', str(uuid.uuid4()))
        group_id = task.get('groupId')

        # Re-import the callable from its fully qualified name.
        abs_func_name = task['func']
        func_name = abs_func_name.split(".")[-1]
        func_path = ".".join(abs_func_name.split(".")[:-1])
        func_module = importlib.import_module(func_path)

        func = getattr(func_module, func_name)

        use_pickle = task.get('pickle', False)

        # Consult settings only when a field is actually missing, so fully
        # specified messages never need the settings module at this point.
        queue = task['queue'] if 'queue' in task else settings.DEFAULT_QUEUE
        max_retries = task['maxRetries'] if 'maxRetries' in task else settings.DEFAULT_MAX_RETRIES

        task_args = task.get('args', [])
        args = WorkerTask._unpickle_args(task_args) if use_pickle else task_args

        # Fix: default missing 'kwargs' to {} (previously task['kwargs'],
        # which raised KeyError) for consistency with the 'args' handling.
        task_kwargs = task.get('kwargs', {})
        kwargs = WorkerTask._unpickle_args(task_kwargs) if use_pickle else task_kwargs

        retry = task.get('retry', 0)
        retry_id = task.get('retryId')

        return WorkerTask(id, group_id, queue, func, args, kwargs, max_retries, retry, retry_id, use_pickle)

    @staticmethod
    def _unpickle_args(args: str) -> Any:
        # Inverse of _pickle_args: base64-decode, then unpickle. The result is
        # whatever was pickled (tuple for args, dict for kwargs).
        return pickle.loads(base64.b64decode(args.encode('utf-8')))
class AutoTaskService(object):
    """Turns registered bound methods into asynchronous tasks.

    register_task shadows the method on its instance with an invoker that
    enqueues a _auto_task_wrapper task instead of calling the method directly.
    """

    def register_task(self, method: Any, queue_name: str = None, max_retries: int = None):
        """Replace *method* on its instance with an async task invoker.

        Optional *queue_name* / *max_retries* are forwarded to the task
        decorator's delay() via kwargs.
        """
        target = method.__self__
        owner_class = target.__class__
        method_name = method.__name__

        def _auto_task_wrapper_invoker(*args, **kwargs):
            # Inject routing overrides only when they were configured.
            if queue_name is not None:
                kwargs['queue_name'] = queue_name

            if max_retries is not None:
                kwargs['max_retries'] = max_retries

            # Enqueue enough information to re-import the class and re-invoke
            # the method on the consumer side.
            _auto_task_wrapper.delay(
                owner_class.__module__,
                owner_class.__name__,
                method_name,
                *args, **kwargs
            )

        setattr(target, method_name, _auto_task_wrapper_invoker)


class _AutoTaskExecutorService(AutoTaskService):
    """AutoTaskService used while a task is being executed.

    Acts as a circuit breaker: it preserves a callable alias to the real
    implementation of the one method being executed, so the wrapper can run it
    once even though register_task shadows the original name.
    """

    def __init__(self, func_name: str):
        self._func_name = func_name

        self._executor_func_name = None

    def register_task(self, method: Any, queue_name: str = None, max_retries: int = None):
        if self._func_name == method.__name__:
            # Alias the real implementation under a private name before the
            # base class replaces the public one with the async invoker.
            target = method.__self__

            self._executor_func_name = self._func_name + '__auto_task_executor__'
            setattr(target, self._executor_func_name, getattr(target, self._func_name))

        super(_AutoTaskExecutorService, self).register_task(method, queue_name, max_retries)

    def get_executor_func_name(self) -> str:
        """Name of the preserved alias, or None if the task was not registered."""
        return self._executor_func_name
34 | logger.debug( 35 | 'Task %s (%s, retry-id: %s) not executed (dead letter queue)', 36 | worker_task.abs_func_name, 37 | worker_task.id, 38 | worker_task.retry_id, 39 | ) 40 | 41 | else: 42 | logger.debug( 43 | 'Execute task %s (%s, retry-id: %s) with args: %s and kwargs: %s', 44 | worker_task.abs_func_name, 45 | worker_task.id, 46 | worker_task.retry_id, 47 | worker_task.args, 48 | worker_task.kwargs 49 | ) 50 | 51 | return self._execute_task(worker_task) 52 | except QueueException: 53 | raise 54 | except MaxRetriesReachedException: 55 | raise 56 | except Exception as ex: 57 | logger.exception( 58 | 'Task %s (%s, retry-id: %s) failed to execute with args: %s and kwargs: %s: %s', 59 | worker_task.abs_func_name, 60 | worker_task.id, 61 | worker_task.retry_id, 62 | worker_task.args, 63 | worker_task.kwargs, 64 | ex 65 | ) 66 | 67 | raise ExecutionFailedException(worker_task.abs_func_name, ex) 68 | 69 | def delay(self, group_id: str, queue_name: str, func: Any, args: tuple, kwargs: dict, max_retries: int, use_pickle: bool, 70 | delay: int, execute_inline: bool) -> Any: 71 | worker_task = WorkerTask(str(uuid.uuid4()), group_id, queue_name, func, args, kwargs, max_retries, 0, None, 72 | use_pickle) 73 | return self._enqueue_task(worker_task, delay, execute_inline, False, True) 74 | 75 | def retry(self, worker_task: WorkerTask, delay: int, execute_inline: bool, count_retries: bool) -> Any: 76 | worker_task = worker_task.copy(settings.FORCE_SERIALIZATION) 77 | worker_task.retry_id = str(uuid.uuid4()) 78 | return self._enqueue_task(worker_task, delay, execute_inline, True, count_retries) 79 | 80 | def _enqueue_task(self, worker_task: WorkerTask, delay: int, execute_inline: bool, is_retry: bool, 81 | count_retries: bool) -> Any: 82 | try: 83 | if is_retry and count_retries: 84 | worker_task.retry += 1 85 | if worker_task.retry >= worker_task.max_retries: 86 | raise MaxRetriesReachedException(worker_task.retry) 87 | 88 | logger.debug('%s task %s (%s, retry-id: %s): %s, %s 
(%s%s)', 89 | 'Retrying' if is_retry else 'Delaying', 90 | worker_task.abs_func_name, 91 | worker_task.id, 92 | worker_task.retry_id, 93 | worker_task.args, 94 | worker_task.kwargs, 95 | worker_task.queue, 96 | ', inline' if execute_inline else '') 97 | 98 | if execute_inline: 99 | return self._execute_task(worker_task) 100 | else: 101 | self.queue_client.add_message(worker_task.queue, worker_task.serialize(), delay) 102 | return None 103 | except QueueDoesNotExistException as ex: 104 | raise InvalidQueueException(ex.queue_name) 105 | except QueueClientException as ex: 106 | logger.warning('Task %s (%s, retry-id: %s) failed to enqueue to %s: %s', 107 | worker_task.abs_func_name, 108 | worker_task.id, 109 | worker_task.retry_id, 110 | worker_task.queue, 111 | ex) 112 | 113 | raise QueueException() 114 | 115 | @classmethod 116 | def _execute_task(cls, worker_task: WorkerTask) -> Any: 117 | result = worker_task.execute() 118 | return result 119 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | [![PyPI version](https://img.shields.io/pypi/v/django-eb-sqs)](https://pypi.org/project/django-eb-sqs/) 3 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 4 | [![CircleCI](https://img.shields.io/circleci/build/github/cuda-networks/django-eb-sqs/master)](https://circleci.com/gh/cuda-networks/django-eb-sqs/tree/master) 5 | 6 | # Django EB SQS - Background Tasks for Amazon SQS 7 | 8 | django-eb-sqs is a simple task manager for AWS SQS. It uses SQS and the [boto3](https://github.com/boto/boto3) library. 9 | 10 | ### Installation 11 | 12 | Install the module with `pip install git+git://github.com/cuda-networks/django-eb-sqs.git` or add it to your `requirements.txt`. 
13 | 14 | Don't forget to add django-eb-sqs app to your Django `INSTALLED_APPS` settings: 15 | ```python 16 | INSTALLED_APPS = ( 17 | ..., 18 | 'eb_sqs', 19 | ) 20 | ``` 21 | 22 | ### Usage 23 | 24 | #### Creating Tasks 25 | 26 | Adding a task to a queue is simple. 27 | 28 | ```python 29 | from eb_sqs.decorators import task 30 | 31 | @task(queue_name='test') 32 | def echo(message): 33 | print(message) 34 | 35 | echo.delay(message='Hello World!') 36 | ``` 37 | **NOTE:** This assumes that you have your AWS keys in the appropriate environment variables, or are using IAM roles. Consult the `boto3` [documentation](https://boto3.readthedocs.org/en/latest/) for further info. 38 | 39 | If you don't pass a queue name, the `EB_SQS_DEFAULT_QUEUE` setting is used. If not set, the queue name is `eb-sqs-default`. 40 | 41 | Additionally the task decorator supports `max_retries` (default `0`) and `use_pickle` (default `False`) attributes for advanced control task execution. 42 | 43 | You can also delay the execution of a task by specifying the delay time in seconds. 44 | 45 | ```python 46 | echo.delay(message='Hello World!', delay=60) 47 | ``` 48 | 49 | During development it is sometimes useful to execute a task immediately without using SQS. This is possible with the `execute_inline` argument. 50 | 51 | ```python 52 | echo.delay(message='Hello World!', execute_inline=True) 53 | ``` 54 | 55 | **NOTE:** `delay` is not applied when `execute_inline` is set to `True`. 56 | 57 | Failed tasks can be retried by using the `retry` method. See the following example: 58 | 59 | ```python 60 | from eb_sqs.decorators import task 61 | 62 | @task(queue_name='test', max_retries=5) 63 | def upload_file(message): 64 | print('# of retries: {}'.format(upload_file.retry_num)) 65 | try: 66 | # upload ... 67 | except ConnectionException: 68 | upload_file.retry() 69 | ``` 70 | 71 | The retry call supports the `delay` and `execute_inline` arguments in order to delay the retry or execute it inline. 
If the retry shall not be counted for the max retry limit set `count_retries` to false. Use 'retry_num' to get the number of retries for the current task. 72 | 73 | **NOTE:** `retry()` throws a `MaxRetriesReachedException` exception if the maximum number of retries is reached. 74 | 75 | #### Executing Tasks 76 | 77 | In order to execute tasks, use the Django command `process_queue`. 78 | This command can work with one or more queues, reading from the queues infinitely and executing tasks as they come-in. 79 | 80 | ```bash 81 | python manage.py process_queue --queues 82 | ``` 83 | 84 | You can either use full queue names, or queue prefix using `prefix:*my_example_prefix*` notation. 85 | 86 | Examples: 87 | ```bash 88 | python manage.py process_queue --queues queue1,queue2 # process queue1 and queue2 89 | python manage.py process_queue --queues queue1,prefix:pr1-,queue2 # process queue1, queue2 and any queue whose name starts with 'pr1-' 90 | ``` 91 | 92 | Use the signals `MESSAGES_RECEIVED`, `MESSAGES_PROCESSED`, `MESSAGES_DELETED` of the `WorkerService` to get informed about the current SQS batch being processed by the management command. 93 | 94 | #### Auto Tasks 95 | 96 | This is a helper tool for the case you wish to define one of your class method as a task, and make it seamless to all callers. 97 | This makes the code much simpler, and allows using classes to invoke your method directly without considering whether it's invoked async or not. 98 | 99 | This is how you would define your class: 100 | ```python 101 | from eb_sqs.auto_tasks.service import AutoTaskService 102 | 103 | class MyService: 104 | def __init__(self, p1=default1, ..., pN=defaultN, auto_task_service=None): 105 | self._auto_task_service = auto_task_service or AutoTaskService() 106 | 107 | self._auto_task_service.register_task(self.my_task_method) 108 | 109 | def my_task_method(self, *args, **kwargs): 110 | ... 111 | 112 | ``` 113 | 114 | Notice the following: 115 | 1. 
Your class needs to have defaults for all parameters in the c'tor 116 | 2. The c'tor must have a parameter named `auto_task_service` 117 | 3. The method shouldn't have any return value (as it's invoked async) 118 | 119 | In case you want your method to retry certain cases, you need to raise `RetryableTaskException`. 120 | You can provide an optional `delay` time for the retry, set `count_retries=False` in case you don't want to limit retries, or use `max_retries_func` to specify a function which will be invoked when the defined maximum number of retries is exhausted. 121 | 122 | #### Settings 123 | 124 | The following settings can be used to fine tune django-eb-sqs. Copy them into your Django `settings.py` file. 125 | 126 | - EB_AWS_REGION (`us-east-1`): The AWS region to use when working with SQS. 127 | - EB_SQS_MAX_NUMBER_OF_MESSAGES (`10`): The maximum number of messages to read in a single call from SQS (<= 10). 128 | - EB_SQS_WAIT_TIME_S (`2`): The time to wait (seconds) when receiving messages from SQS. 129 | - NO_QUEUES_WAIT_TIME_S (`5`): The time a worker waits if there are no SQS queues available to process. 130 | - EB_SQS_AUTO_ADD_QUEUE (`False`): If queues should be added automatically to AWS if they don't exist. 131 | - EB_SQS_QUEUE_MESSAGE_RETENTION (`1209600`): The value (in seconds) to be passed to MessageRetentionPeriod parameter, when creating a queue (only relevant in case EB_SQS_AUTO_ADD_QUEUE is set to True). 132 | - EB_SQS_QUEUE_VISIBILITY_TIMEOUT (`300`): The value (in seconds) to be passed to VisibilityTimeout parameter, when creating a queue (only relevant in case EB_SQS_AUTO_ADD_QUEUE is set to True). 133 | - EB_SQS_DEAD_LETTER_MODE (`False`): Enable if this worker is handling the SQS dead letter queue. Tasks won't be executed but group callback is. 134 | - EB_SQS_DEFAULT_DELAY (`0`): Default task delay time in seconds. 135 | - EB_SQS_DEFAULT_MAX_RETRIES (`0`): Default retry limit for all tasks.
136 | - EB_SQS_DEFAULT_COUNT_RETRIES (`True`): Count retry calls. Needed if max retries check shall be executed. 137 | - EB_SQS_DEFAULT_QUEUE (`eb-sqs-default`): Default queue name if none is specified when creating a task. 138 | - EB_SQS_EXECUTE_INLINE (`False`): Execute tasks immediately without using SQS. Useful during development. Global setting `True` will override setting it on a task level. 139 | - EB_SQS_FORCE_SERIALIZATION (`False`): Forces serialization of tasks when executed `inline`. This setting is helpful during development to see if all arguments are serialized and deserialized properly. 140 | - EB_SQS_QUEUE_PREFIX (``): Prefix to use for the queues. The prefix is added to the queue name. 141 | - EB_SQS_USE_PICKLE (`False`): Enable to use `pickle` to serialize task parameters. Uses `json` as default. 142 | - EB_SQS_AWS_MAX_RETRIES (`30`): Default retry limit on a boto3 call to AWS SQS. 143 | - EB_SQS_REFRESH_PREFIX_QUEUES_S (`10`): Minimal number of seconds to wait between refreshing queue list, in case prefix is used. 144 | 145 | 146 | ### Development 147 | 148 | Make sure to install the development dependencies from `development.txt`. 149 | 150 | #### Tests 151 | 152 | The built-in tests can be executed with the Django test runner.
153 | 154 | ```bash 155 | python -m django test --settings=eb_sqs.test_settings 156 | ``` 157 | -------------------------------------------------------------------------------- /eb_sqs/worker/service.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import signal 3 | from datetime import timedelta 4 | from time import sleep 5 | from typing import Any 6 | 7 | import boto3 8 | from boto3.resources.base import ServiceResource 9 | 10 | from botocore.config import Config 11 | from botocore.exceptions import ClientError 12 | import django.dispatch 13 | from django.utils import timezone 14 | 15 | from eb_sqs import settings 16 | from eb_sqs.worker.commons import django_db_management 17 | from eb_sqs.worker.worker import Worker 18 | from eb_sqs.worker.worker_exceptions import ExecutionFailedException 19 | from eb_sqs.worker.worker_factory import WorkerFactory 20 | 21 | logger = logging.getLogger(__name__) 22 | 23 | MESSAGES_RECEIVED = django.dispatch.Signal() 24 | MESSAGES_PROCESSED = django.dispatch.Signal() 25 | MESSAGES_DELETED = django.dispatch.Signal() 26 | 27 | 28 | class WorkerService(object): 29 | _PREFIX_STR = 'prefix:' 30 | _RECEIVE_COUNT_ATTRIBUTE = 'ApproximateReceiveCount' 31 | 32 | def __init__(self): 33 | self._exit_gracefully = False 34 | self._last_healthcheck_time = None 35 | 36 | def process_queues(self, queue_names: list): 37 | signal.signal(signal.SIGTERM, self._exit_called) 38 | 39 | self.write_healthcheck_file() 40 | self._last_healthcheck_time = timezone.now() 41 | 42 | logger.debug('[django-eb-sqs] Connecting to SQS: {}'.format(', '.join(queue_names))) 43 | 44 | sqs = boto3.resource( 45 | 'sqs', 46 | region_name=settings.AWS_REGION, 47 | config=Config(retries={'max_attempts': settings.AWS_MAX_RETRIES}) 48 | ) 49 | 50 | prefixes = list(filter(lambda qn: qn.startswith(self._PREFIX_STR), queue_names)) 51 | queues = self.get_queues_by_names(sqs, list(set(queue_names) - set(prefixes))) 52 | 53 | 
    def process_messages(self, queues: list, worker: Worker, static_queues: list):
        """Poll each queue once, execute the received tasks, then delete them.

        Signals fire around each batch: MESSAGES_RECEIVED after polling,
        MESSAGES_PROCESSED after execution, MESSAGES_DELETED after deletion.
        Also refreshes the healthcheck file when the write period has elapsed.
        """

        for queue in queues:
            # Honor SIGTERM between queues for a graceful shutdown.
            if self._exit_gracefully:
                return

            try:
                messages = self.poll_messages(queue)
                logger.debug('[django-eb-sqs] Polled {} messages'.format(len(messages)))

                self._send_signal(MESSAGES_RECEIVED, messages=messages)

                msg_entries = []
                for msg in messages:
                    # _execute_user_code swallows task errors after logging, so
                    # every polled message is deleted below regardless of the
                    # task outcome (at-most-once processing).
                    # NOTE(review): the lambda captures `msg` late, but it is
                    # invoked immediately within this iteration, so each call
                    # still sees the current message.
                    self._execute_user_code(lambda: self._process_message(msg, worker))
                    msg_entries.append({
                        'Id': msg.message_id,
                        'ReceiptHandle': msg.receipt_handle
                    })

                self._send_signal(MESSAGES_PROCESSED, messages=messages)

                self.delete_messages(queue, msg_entries)

                self._send_signal(MESSAGES_DELETED, messages=messages)
            except ClientError as exc:
                error_code = exc.response.get('Error', {}).get('Code', None)
                # A prefix-discovered queue may legitimately disappear; only
                # static (explicitly named) queues warrant a warning.
                if error_code == 'AWS.SimpleQueueService.NonExistentQueue' and queue not in static_queues:
                    logger.debug('[django-eb-sqs] Queue was already deleted {}: {}'.format(queue.url, exc),
                                 exc_info=True)
                else:
                    logger.warning('[django-eb-sqs] Error polling queue {}: {}'.format(queue.url, exc), exc_info=True)
            except Exception as exc:
                logger.warning('[django-eb-sqs] Error polling queue {}: {}'.format(queue.url, exc), exc_info=True)

            # Touch the healthcheck file at most once per write period.
            if timezone.now() - timedelta(
                    seconds=settings.MIN_HEALTHCHECK_WRITE_PERIOD_S) > self._last_healthcheck_time:
                self.write_healthcheck_file()
                self._last_healthcheck_time = timezone.now()

    def delete_messages(self, queue, msg_entries: list):
        """Batch-delete the given messages from *queue*; log partial failures."""
        if len(msg_entries) > 0:
            response = queue.delete_messages(Entries=msg_entries)

            # logging
            failed = response.get('Failed', [])
            num_failed = len(failed)
            if num_failed > 0:
                logger.warning('[django-eb-sqs] Failed deleting {} messages: {}'.format(num_failed, failed))

    def poll_messages(self, queue) -> list:
        """Long-poll *queue* for up to MAX_NUMBER_OF_MESSAGES messages.

        Requests the receive-count attribute so redeliveries can be detected.
        """
        return queue.receive_messages(
            MaxNumberOfMessages=settings.MAX_NUMBER_OF_MESSAGES,
            WaitTimeSeconds=settings.WAIT_TIME_S,
            AttributeNames=[self._RECEIVE_COUNT_ATTRIBUTE]
        )
django.dispatch.Signal, messages: list): 143 | if dispatch_signal.has_listeners(sender=self.__class__): 144 | self._execute_user_code(lambda: dispatch_signal.send(sender=self.__class__, messages=messages)) 145 | 146 | def _process_message(self, msg, worker: Worker): 147 | logger.debug('[django-eb-sqs] Read message {}'.format(msg.message_id)) 148 | try: 149 | receive_count = int(msg.attributes[self._RECEIVE_COUNT_ATTRIBUTE]) 150 | 151 | if receive_count > 1: 152 | logger.warning('[django-eb-sqs] SQS re-queued message {} times - msg: {}'.format( 153 | receive_count, msg.body 154 | )) 155 | 156 | worker.execute(msg.body) 157 | 158 | logger.debug('[django-eb-sqs] Processed message {}'.format(msg.message_id)) 159 | except ExecutionFailedException as exc: 160 | logger.warning('[django-eb-sqs] Handling message {} got error: {}'.format(msg.message_id, repr(exc))) 161 | 162 | @staticmethod 163 | def _execute_user_code(function: Any): 164 | try: 165 | with django_db_management(): 166 | function() 167 | except Exception as exc: 168 | logger.error('[django-eb-sqs] Unhandled error: {}'.format(exc), exc_info=True) 169 | 170 | def get_queues_by_names(self, sqs: ServiceResource, queue_names: list) -> list: 171 | return [sqs.get_queue_by_name(QueueName=queue_name) for queue_name in queue_names] 172 | 173 | def get_queues_by_prefixes(self, sqs: ServiceResource, prefixes: list) -> list: 174 | queues = [] 175 | 176 | for prefix in prefixes: 177 | queues += sqs.queues.filter(QueueNamePrefix=prefix) 178 | 179 | return queues 180 | 181 | def write_healthcheck_file(self): 182 | with open(settings.HEALTHCHECK_FILE_NAME, 'w') as file: 183 | file.write(timezone.now().isoformat()) 184 | 185 | def _exit_called(self, signum, frame): 186 | logger.info('[django-eb-sqs] Termination signal called: {}'.format(signum)) 187 | self._exit_gracefully = True 188 | --------------------------------------------------------------------------------