├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── celery_singleton ├── __init__.py ├── backends │ ├── __init__.py │ ├── base.py │ └── redis.py ├── config.py ├── exceptions.py ├── singleton.py └── util.py ├── poetry.lock ├── pyproject.toml ├── sample └── sample.py ├── setup.cfg └── tests ├── conftest.py ├── test_backends.py ├── test_config.py └── test_singleton.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | os: linux 2 | dist: focal 3 | cache: pip 4 | services: 5 | - redis 6 | language: python 7 | python: 8 | - "3.6" 9 | - "3.7" 10 | - "3.8" 11 | env: 12 | - CELERY=4 13 | - CELERY=5 14 | before_install: 15 | - pip install --upgrade pip 16 | - pip install poetry 17 | install: 18 | - poetry install -v 19 | - if [[ $CELERY == "4" ]]; then pip install "celery>=4, <5"; fi 20 | script: python -m pytest 21 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ### Added 4 | - Support passing an optional custom [`json.JSONEncoder`] to `util.generate_lock()` via `singleton_json_encoder_class`. 5 | Useful for task arguments with objects marshalable to the same string representation, e.g. passing [`uuid.UUID`] to [`str()`]. 
6 | 7 | PR [#44](https://github.com/steinitzu/celery-singleton/pull/44) by [Tony Narlock](https://github.com/tony) in regards to [#42](https://github.com/steinitzu/celery-singleton/issues/42) and [#36](https://github.com/steinitzu/celery-singleton/issues/36). 8 | 9 | [`json.JSONEncoder`]: https://docs.python.org/3/library/json.html#json.JSONEncoder 10 | [`str()`]: https://docs.python.org/3/library/stdtypes.html#str 11 | [`uuid.UUID`]: https://docs.python.org/3/library/uuid.html#uuid.UUID 12 | 13 | ## [0.3.1] - 2021-01-14 14 | 15 | ### Changed 16 | - Correct signal usage in README [#30](https://github.com/steinitzu/celery-singleton/pull/30) by [@reorx](https://github.com/reorx) 17 | - Fix wrong repository and homepage URL in pyproject.toml (thanks [@utapyngo](https://github.com/utapyngo) for pointing it out) 18 | 19 | ## [0.3.0] - 2020-10-12 20 | 21 | ### Added 22 | - Support Celery 5. PR [#30](https://github.com/steinitzu/celery-singleton/pull/30) by [@wangsha](https://github.com/wangsha) 23 | 24 | ### Removed 25 | - Remove python 3.5 support (dropped by Celery 5) 26 | 27 | ## [0.2.0] - 2019-05-24 28 | 29 | ### Added 30 | - This changelog 31 | - Support for custom storage backend implementations 32 | - Configurable backend URL for default or custom storage backend (to e.g. 
use a different redis server) 33 | - Configurable key prefix for locks 34 | - `lock\_expiry` option 35 | - `raise\_on\_duplicate` option 36 | - `unique\_on` option 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Steinþór Pálsson 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md LICENSE -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Celery-Singleton 2 | 3 | Duplicate tasks clogging up your message broker? 
Do time based rate limits make you feel icky? Look no further! 4 | This is a baseclass for celery tasks that ensures only one instance of the task can be queued or running at any given time. Uses the task's name+arguments to determine uniqueness. 5 | 6 | 7 | **Table of Contents** 8 | 9 | - [Celery-Singleton](#celery-singleton) 10 | - [Prerequisites](#prerequisites) 11 | - [Quick start](#quick-start) 12 | - [How does it work?](#how-does-it-work) 13 | - [Handling deadlocks](#handling-deadlocks) 14 | - [Backends](#backends) 15 | - [Task configuration](#task-configuration) 16 | - [unique\_on](#uniqueon) 17 | - [raise\_on\_duplicate](#raiseonduplicate) 18 | - [App Configuration](#app-configuration) 19 | - [Testing](#testing) 20 | - [Contribute](#contribute) 21 | 22 | 23 | 24 | 25 | ## Prerequisites 26 | celery-singleton uses the JSON representation of a task's `delay()` or `apply_async()` arguments to generate a unique lock and stores it in redis. 27 | By default it uses the redis server of the celery [result backend](http://docs.celeryproject.org/en/latest/getting-started/first-steps-with-celery.html#keeping-results). If you use a different/no result backend or want to use a different redis server for celery-singleton, refer the [configuration section](#app-configuration) for how to customize the redis. To use something other than redis, refer to the section on [backends](#backends) 28 | 29 | So in gist: 30 | 1. Make sure all your tasks arguments are JSON serializable 31 | 2. Your celery app is configured with a redis result backend or you have specified another redis/compatible backend in your config 32 | 33 | If you're already using a redis backend and a mostly default celery config, you're all set! 
34 | 35 | ## Quick start 36 | `$ pip install celery-singleton` 37 | 38 | ```python 39 | import time 40 | from celery_singleton import Singleton 41 | from somewhere import celery_app 42 | 43 | @celery_app.task(base=Singleton) 44 | def do_stuff(*args, **kwargs): 45 | time.sleep(4) 46 | return 'I just woke up' 47 | 48 | # run the task as normal 49 | async_result = do_stuff.delay(1, 2, 3, a='b') 50 | async_result2 = do_stuff.delay(1, 2, 3, a='b') 51 | 52 | assert async_result == async_result2 # These are the same, task is only queued once 53 | ``` 54 | 55 | That's it! Your task is a singleton and calls to `do_stuff.delay()` will either queue a new task or return an AsyncResult for the currently queued/running instance of the task. 56 | 57 | 58 | ## How does it work? 59 | 60 | The `Singleton` class overrides `apply_async()` of the base task implementation to only queue a task if an identical task is not already running. Two tasks are considered identical if both have the same name and same arguments. 61 | 62 | This is achieved by using redis for distributed locking. 63 | 64 | When you call `delay()` or `apply_async()` on a singleton task it first attempts to acquire a lock in redis using a hash of [task_name+arguments] as a key and a new task ID as a value. `SETNX` is used for this to prevent race conditions. 65 | If a lock is successfully acquired, the task is queued as normal with the `apply_async` method of the base class. 66 | If another run of the task already holds a lock, we fetch its task ID instead and return an `AsyncResult` for it. This way it works seamlessly with a standard celery setup, there are no "duplicate exceptions" you need to handle, no timeouts. `delay()` always returns an `AsyncResult` as expected, either for the task you just spawned or for the task that acquired the lock before it. 
67 | So continuing on with the "Quick start" example: 68 | 69 | ```python 70 | a = do_stuff.delay(1, 2, 3) 71 | b = do_stuff.delay(1, 2, 3) 72 | 73 | assert a == b # Both are AsyncResult for the same task 74 | 75 | c = do_stuff.delay(4, 5, 6) 76 | 77 | assert a != c # c has different arguments so it spawns a new task 78 | ``` 79 | 80 | The lock is released only when the task has finished running, using either the `on_success` or `on_failure` handler, after which you're free to start another identical run. 81 | 82 | ```python 83 | # wait for a to finish 84 | a.get() 85 | 86 | # Now we can spawn a duplicate of it 87 | d = do_stuff.delay(1, 2, 3) 88 | 89 | assert a != d 90 | ``` 91 | 92 | 93 | ## Handling deadlocks 94 | Since the task locks are only released when the task is actually finished running (on success or on failure), you can sometimes end up in a situation where the lock remains but there's no task available to release it. 95 | This can for example happen if your celery worker crashes before it can release the lock. 96 | 97 | A convenience method is included to clear all existing locks, you can run it on celery worker startup or any other celery signal like so: 98 | 99 | ```python 100 | from celery.signals import worker_ready 101 | from celery_singleton import clear_locks 102 | from somewhere import celery_app 103 | 104 | @worker_ready.connect 105 | def unlock_all(**kwargs): 106 | clear_locks(celery_app) 107 | ``` 108 | 109 | An alternative is to set a [lock expiry](#lock\_expiry) time in the task or app config. This makes it so that locks are always released after a given time. 110 | 111 | ## Backends 112 | 113 | Redis is the default storage backend for celery singleton. This is where task locks are stored where they can be accessed across celery workers. 114 | A custom redis url can be set using the `singleton_backend_url` config variable in the celery config. 
By default Celery Singleton attempts to use the redis url of the celery result backend and if that fails the celery broker. 115 | 116 | If you don't want to use redis you can implement a custom storage backend. 117 | An abstract base class to inherit from is included in `celery_singleton.backends.BaseBackend` and [the source code of `RedisBackend`](celery_singleton/backends/redis.py) serves as an example implementation. 118 | Once you have your backend implemented, set the `singleton_backend_class` [configuration](#app-configuration) variables to point to your class. 119 | 120 | 121 | ## Task configuration 122 | 123 | ### unique\_on 124 | 125 | This can be used to make celery-singleton only consider certain arguments when deciding whether two tasks are identical. 126 | (By default, two tasks are considered identical to each other if their name and all arguments are the same). 127 | 128 | For example, this task allows only one instance per username, other arguments don't matter: 129 | 130 | ```python 131 | @app.task(base=Singleton, unique_on=['username', ]) 132 | def do_something(username, otherarg=None): 133 | time.sleep(5) 134 | 135 | 136 | task1 = do_something.delay(username='bob', otherarg=99) 137 | task2 = do_something.delay(username='bob', otherarg=100) # this is a duplicate of task1 138 | assert task1 == task2 139 | ``` 140 | 141 | Specify an empty list to consider the task name only. 142 | 143 | ### raise\_on\_duplicate 144 | 145 | When this option is enabled the task's `delay` and `apply_async` method will raise a `DuplicateTaskError` exception when attempting to spawn a duplicate task instead of returning the existing task's `AsyncResult` 146 | This is useful when you want only one of a particular task at a time, but want more control over what happens on duplicate attempts. 
147 | 148 | ```python 149 | from celery_singleton import Singleton, DuplicateTaskError 150 | 151 | 152 | @app.task(base=Singleton, raise_on_duplicate=True) 153 | def do_something(username): 154 | time.sleep(5) 155 | 156 | task1 = do_something.delay('bob') 157 | try: 158 | task2 = do_something.delay('bob') 159 | except DuplicateTaskError as e: 160 | print("You tried to create a duplicate of task with ID", e.task_id) 161 | ``` 162 | 163 | This option can also be applied globally to all `Singleton` tasks by setting `singleton_raise_on_duplicate` in the [app config](#app-configuration). The task level option always overrides the app config when supplied. 164 | 165 | ### lock\_expiry 166 | 167 | Number of seconds until the task lock expires. This is useful when you want a max of one task queued within a given time frame rather than strictly one at a time. 168 | This also adds some safety to your application as it guarantees that locks will eventually be released in case of worker crashes and network failures. For this use case it's recommended to set the lock expiry to a value slightly longer than the expected task duration. 169 | 170 | Example 171 | 172 | ```python 173 | @app.task(base=Singleton, lock_expiry=10) 174 | def runs_for_12_seconds(): 175 | time.sleep(12) 176 | 177 | 178 | task1 = runs_for_12_seconds.delay() 179 | time.sleep(11) 180 | task2 = runs_for_12_seconds.delay() 181 | 182 | assert task1 != task2 # These are two separate task instances 183 | ``` 184 | 185 | This option can be applied globally in the [app config](#app-configuration) with `singleton_lock_expiry`. Task option supersedes the app config. 186 | 187 | 188 | ## App Configuration 189 | 190 | Celery singleton supports the following configuration options. These should be added to your Celery app config. 191 | Note: if using old style celery config with uppercase variables and a namespace, make sure the singleton config matches. E.g. 
`CELERY_SINGLETON_BACKEND_URL` instead of `singleton_backend_url` 192 | 193 | 194 | | Key | Default | Description | 195 | |--------------------------------|-----------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------| 196 | | `singleton_backend_url` | `celery_backend_url` | The URL of the storage backend. If using the default backend implementation, this should be a redis URL. It is passed as the first argument to the backend class. | 197 | | `singleton_backend_class` | `celery_singleton.backend.RedisBackend` | The full import path of a backend class as string or a reference to the class | 198 | | `singleton_backend_kwargs` | `{}` | Passed as keyword arguments to the backend class | 199 | | `singleton_json_encoder_class` | `None` ([`json.JSONEncoder`]) | Optional JSON encoder class for generating lock. Useful for task arguments where objects can be reliably marshalled to string (such as [`uuid.UUID`]) | 200 | | `singleton_key_prefix` | `SINGLETONLOCK_` | Locks are stored as ``. Use to prevent collisions with other keys in your database. | 201 | | `singleton_raise_on_duplicate` | `False` | When `True` an attempt to queue a duplicate task will raise a `DuplicateTaskerror`. The default behavior is to return the `AsyncResult` for the existing task. | 202 | | `singleton_lock_expiry` | `None` (Never expires) | Lock expiry time in second for singleton task locks. When lock expires identical tasks are allowed to run regardless of whether the locked task has finished or not. 
| 203 | | | | | 204 | 205 | [`json.JSONEncoder`]: https://docs.python.org/3/library/json.html#json.JSONEncoder 206 | [`uuid.UUID`]: https://docs.python.org/3/library/uuid.html#uuid.UUID 207 | 208 | ## Testing 209 | 210 | Tests are located in the `/tests` directory can be run with pytest 211 | 212 | ``` 213 | pip install -r dev-requirements.txt 214 | python -m pytest 215 | ``` 216 | 217 | Some of the tests require a running redis server on `redis://localhost` 218 | To use a redis server on a different url/host, set the env variable `CELERY_SINGLETON_TEST_REDIS_URL` 219 | 220 | 221 | ## Contribute 222 | Please open an issue if you encounter a bug, have any questions or suggestions for improvements or run into any trouble at all using this package. 223 | -------------------------------------------------------------------------------- /celery_singleton/__init__.py: -------------------------------------------------------------------------------- 1 | from .singleton import Singleton, clear_locks 2 | from .exceptions import DuplicateTaskError 3 | 4 | __version__ = "0.3.1" 5 | -------------------------------------------------------------------------------- /celery_singleton/backends/__init__.py: -------------------------------------------------------------------------------- 1 | from .redis import RedisBackend 2 | from .base import BaseBackend 3 | 4 | 5 | _backend = None 6 | 7 | 8 | def get_backend(config): 9 | """ 10 | Get the celery-singleton backend. 11 | The backend instance is cached for subsequent calls. 
12 | 13 | :param app: celery instance 14 | :type app: celery.Celery 15 | """ 16 | global _backend 17 | if _backend: 18 | return _backend 19 | klass = config.backend_class 20 | kwargs = config.backend_kwargs 21 | url = config.backend_url 22 | _backend = klass(url, **kwargs) 23 | return _backend 24 | 25 | 26 | __all__ = ["RedisBackend", "BaseBackend", "get_backend"] 27 | -------------------------------------------------------------------------------- /celery_singleton/backends/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class BaseBackend(ABC): 5 | @abstractmethod 6 | def lock(self, lock, task_id, expiry=None): 7 | """ 8 | Store a lock for given lock value and task ID 9 | 10 | :param lock: Lock/mutex string 11 | :type lock: `str` 12 | :param task_id: Task id associated with the lock 13 | :type task_id: `str` 14 | :param expiry: Lock's time to live in seconds, after which 15 | the lock will be removed regardless of whether 16 | the task has finished or not. When not supplied the 17 | lock never expires. 
18 | :type expiry: `int` 19 | :return: `True` if lock was aquired succesfully otherwise `False` 20 | :rtype: `bool` 21 | """ 22 | 23 | @abstractmethod 24 | def unlock(self, lock): 25 | """ 26 | Unlock the given lock 27 | 28 | :param lock: Lock/mutext string to unlock 29 | :type lock: `str` 30 | """ 31 | 32 | @abstractmethod 33 | def get(self, lock): 34 | """ 35 | Get task ID for given lock 36 | 37 | :param lock: Lock/mutext string 38 | :type lock: str 39 | :return: A task ID if exists, otherwise `None` 40 | :rtype: `str` or `None` 41 | """ 42 | 43 | @abstractmethod 44 | def clear(self, key_prefix): 45 | """ 46 | Clear all locks stored under given key_prefix 47 | 48 | :param key_prefix: Prefix of keys to clear 49 | :type key_prefix: str 50 | :return: `None` 51 | """ 52 | -------------------------------------------------------------------------------- /celery_singleton/backends/redis.py: -------------------------------------------------------------------------------- 1 | from redis import Redis 2 | 3 | from .base import BaseBackend 4 | 5 | 6 | class RedisBackend(BaseBackend): 7 | def __init__(self, *args, **kwargs): 8 | """ 9 | args and kwargs are forwarded to redis.from_url 10 | """ 11 | self.redis = Redis.from_url(*args, decode_responses=True, **kwargs) 12 | 13 | def lock(self, lock, task_id, expiry=None): 14 | return not not self.redis.set(lock, task_id, nx=True, ex=expiry) 15 | 16 | def unlock(self, lock): 17 | self.redis.delete(lock) 18 | 19 | def get(self, lock): 20 | return self.redis.get(lock) 21 | 22 | def clear(self, key_prefix): 23 | cursor = 0 24 | while True: 25 | cursor, keys = self.redis.scan(cursor=cursor, match=key_prefix + "*") 26 | for k in keys: 27 | self.redis.delete(k) 28 | if cursor == 0: 29 | break 30 | -------------------------------------------------------------------------------- /celery_singleton/config.py: -------------------------------------------------------------------------------- 1 | from importlib import import_module 2 | 3 | 4 | 
class Config: 5 | def __init__(self, app): 6 | self.app = app 7 | 8 | @property 9 | def key_prefix(self): 10 | return self.app.conf.get("singleton_key_prefix", "SINGLETONLOCK_") 11 | 12 | @property 13 | def backend_class(self): 14 | path_or_class = self.app.conf.get( 15 | "singleton_backend_class", "celery_singleton.backends.redis.RedisBackend" 16 | ) 17 | if isinstance(path_or_class, str): 18 | path = path_or_class.split(".") 19 | mod_name, class_name = ".".join(path[:-1]), path[-1] 20 | mod = import_module(mod_name) 21 | return getattr(mod, class_name) 22 | return path_or_class 23 | 24 | @property 25 | def json_encoder_class(self): 26 | path_or_class = self.app.conf.get("singleton_json_encoder_class", None) 27 | if isinstance(path_or_class, str): 28 | path = path_or_class.split(".") 29 | mod_name, class_name = ".".join(path[:-1]), path[-1] 30 | mod = import_module(mod_name) 31 | return getattr(mod, class_name) 32 | return path_or_class 33 | 34 | @property 35 | def backend_kwargs(self): 36 | return self.app.conf.get("singleton_backend_kwargs", {}) 37 | 38 | @property 39 | def backend_url(self): 40 | url = self.app.conf.get("singleton_backend_url") 41 | if url is not None: 42 | return url 43 | url = self.app.conf.get("result_backend") 44 | if not url or not url.startswith("redis://"): 45 | url = self.app.conf.get("broker_url") 46 | return url 47 | 48 | @property 49 | def raise_on_duplicate(self): 50 | return self.app.conf.get("singleton_raise_on_duplicate") 51 | 52 | @property 53 | def lock_expiry(self): 54 | return self.app.conf.get("singleton_lock_expiry") 55 | -------------------------------------------------------------------------------- /celery_singleton/exceptions.py: -------------------------------------------------------------------------------- 1 | class CelerySingletonException(Exception): 2 | pass 3 | 4 | 5 | class DuplicateTaskError(CelerySingletonException): 6 | """ 7 | Raised when attempting to queue a duplicat task 8 | and `raise_on_duplicate` is 
enabled 9 | """ 10 | 11 | def __init__(self, message, task_id): 12 | self.task_id = task_id 13 | super().__init__(message) 14 | 15 | pass 16 | -------------------------------------------------------------------------------- /celery_singleton/singleton.py: -------------------------------------------------------------------------------- 1 | from celery import Task as BaseTask 2 | from kombu.utils.uuid import uuid 3 | import inspect 4 | 5 | from .backends import get_backend 6 | from .config import Config 7 | from .exceptions import DuplicateTaskError 8 | from . import util 9 | 10 | 11 | def clear_locks(app): 12 | config = Config(app) 13 | backend = get_backend(config) 14 | backend.clear(config.key_prefix) 15 | 16 | 17 | class Singleton(BaseTask): 18 | abstract = True 19 | _singleton_backend = None 20 | _singleton_config = None 21 | unique_on = None 22 | raise_on_duplicate = None 23 | lock_expiry = None 24 | 25 | @property 26 | def _raise_on_duplicate(self): 27 | if self.raise_on_duplicate is not None: 28 | return self.raise_on_duplicate 29 | return self.singleton_config.raise_on_duplicate or False 30 | 31 | @property 32 | def singleton_config(self): 33 | if self._singleton_config: 34 | return self._singleton_config 35 | self._singleton_config = Config(self._get_app()) 36 | return self._singleton_config 37 | 38 | @property 39 | def singleton_backend(self): 40 | if self._singleton_backend: 41 | return self._singleton_backend 42 | self._singleton_backend = get_backend(self.singleton_config) 43 | return self._singleton_backend 44 | 45 | def aquire_lock(self, lock, task_id): 46 | expiry = ( 47 | self.lock_expiry 48 | if self.lock_expiry is not None 49 | else self.singleton_config.lock_expiry 50 | ) 51 | return self.singleton_backend.lock(lock, task_id, expiry=expiry) 52 | 53 | def get_existing_task_id(self, lock): 54 | return self.singleton_backend.get(lock) 55 | 56 | def generate_lock(self, task_name, task_args=None, task_kwargs=None): 57 | unique_on = self.unique_on 58 | 
task_args = task_args or [] 59 | task_kwargs = task_kwargs or {} 60 | if unique_on is not None: 61 | if isinstance(unique_on, str): 62 | unique_on = [unique_on] 63 | if not any(unique_on): 64 | unique_kwargs = {} 65 | else: 66 | sig = inspect.signature(self.run) 67 | bound = sig.bind(*task_args, **task_kwargs) 68 | bound.apply_defaults() 69 | unique_kwargs = {key: bound.arguments[key] for key in unique_on} 70 | unique_args = [] 71 | else: 72 | unique_args = task_args 73 | unique_kwargs = task_kwargs 74 | return util.generate_lock( 75 | task_name, 76 | unique_args, 77 | unique_kwargs, 78 | key_prefix=self.singleton_config.key_prefix, 79 | json_encoder_class=self.singleton_config.json_encoder_class, 80 | ) 81 | 82 | def apply_async( 83 | self, 84 | args=None, 85 | kwargs=None, 86 | task_id=None, 87 | producer=None, 88 | link=None, 89 | link_error=None, 90 | shadow=None, 91 | **options 92 | ): 93 | args = args or [] 94 | kwargs = kwargs or {} 95 | task_id = task_id or uuid() 96 | lock = self.generate_lock(self.name, args, kwargs) 97 | 98 | run_args = dict( 99 | lock=lock, 100 | args=args, 101 | kwargs=kwargs, 102 | task_id=task_id, 103 | producer=producer, 104 | link=link, 105 | link_error=link_error, 106 | shadow=shadow, 107 | **options 108 | ) 109 | 110 | task = self.lock_and_run(**run_args) 111 | if task: 112 | return task 113 | 114 | existing_task_id = self.get_existing_task_id(lock) 115 | while not existing_task_id: 116 | task = self.lock_and_run(**run_args) 117 | if task: 118 | return task 119 | existing_task_id = self.get_existing_task_id(lock) 120 | return self.on_duplicate(existing_task_id) 121 | 122 | def lock_and_run(self, lock, *args, task_id=None, **kwargs): 123 | lock_aquired = self.aquire_lock(lock, task_id) 124 | if lock_aquired: 125 | try: 126 | return super(Singleton, self).apply_async( 127 | *args, task_id=task_id, **kwargs 128 | ) 129 | except Exception: 130 | # Clear the lock if apply_async fails 131 | self.unlock(lock) 132 | raise 133 | 134 | def 
release_lock(self, task_args=None, task_kwargs=None): 135 | lock = self.generate_lock(self.name, task_args, task_kwargs) 136 | self.unlock(lock) 137 | 138 | def unlock(self, lock): 139 | self.singleton_backend.unlock(lock) 140 | 141 | def on_duplicate(self, existing_task_id): 142 | if self._raise_on_duplicate: 143 | raise DuplicateTaskError( 144 | "Attempted to queue a duplicate of task ID {}".format(existing_task_id), 145 | task_id=existing_task_id, 146 | ) 147 | return self.AsyncResult(existing_task_id) 148 | 149 | def on_failure(self, exc, task_id, args, kwargs, einfo): 150 | self.release_lock(task_args=args, task_kwargs=kwargs) 151 | 152 | def on_success(self, retval, task_id, args, kwargs): 153 | self.release_lock(task_args=args, task_kwargs=kwargs) 154 | -------------------------------------------------------------------------------- /celery_singleton/util.py: -------------------------------------------------------------------------------- 1 | import json 2 | from hashlib import md5 3 | 4 | 5 | def generate_lock( 6 | task_name, 7 | task_args=None, 8 | task_kwargs=None, 9 | key_prefix="SINGLETONLOCK_", 10 | json_encoder_class=None, 11 | ): 12 | str_args = json.dumps(task_args or [], sort_keys=True, cls=json_encoder_class) 13 | str_kwargs = json.dumps(task_kwargs or {}, sort_keys=True, cls=json_encoder_class) 14 | task_hash = md5((task_name + str_args + str_kwargs).encode()).hexdigest() 15 | key_prefix = key_prefix 16 | return key_prefix + task_hash 17 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "amqp" 3 | version = "5.0.1" 4 | description = "Low-level AMQP client for Python (fork of amqplib)." 
5 | category = "main" 6 | optional = false 7 | python-versions = ">=3.6" 8 | 9 | [package.dependencies] 10 | vine = "5.0.0" 11 | 12 | [[package]] 13 | name = "atomicwrites" 14 | version = "1.4.0" 15 | description = "Atomic file writes." 16 | category = "dev" 17 | optional = false 18 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 19 | 20 | [[package]] 21 | name = "attrs" 22 | version = "20.2.0" 23 | description = "Classes Without Boilerplate" 24 | category = "dev" 25 | optional = false 26 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 27 | 28 | [package.extras] 29 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] 30 | docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] 31 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] 32 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] 33 | 34 | [[package]] 35 | name = "billiard" 36 | version = "3.6.3.0" 37 | description = "Python multiprocessing fork with improvements and bugfixes" 38 | category = "main" 39 | optional = false 40 | python-versions = "*" 41 | 42 | [[package]] 43 | name = "celery" 44 | version = "5.0.0" 45 | description = "Distributed Task Queue." 
46 | category = "main" 47 | optional = false 48 | python-versions = ">=3.6," 49 | 50 | [package.dependencies] 51 | billiard = ">=3.6.3.0,<4.0" 52 | click = ">=7.0" 53 | click-didyoumean = ">=0.0.3" 54 | click-repl = ">=0.1.6" 55 | kombu = ">=5.0.0,<6.0" 56 | pytz = ">0.0-dev" 57 | vine = ">=5.0.0,<6.0" 58 | 59 | [package.extras] 60 | arangodb = ["pyArango (>=1.3.2)"] 61 | auth = ["cryptography"] 62 | azureblockblob = ["azure-storage (0.36.0)", "azure-common (1.1.5)", "azure-storage-common (1.1.0)"] 63 | brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] 64 | cassandra = ["cassandra-driver (<3.21.0)"] 65 | consul = ["python-consul"] 66 | cosmosdbsql = ["pydocumentdb (2.3.2)"] 67 | couchbase = ["couchbase (>=3.0.0)"] 68 | couchdb = ["pycouchdb"] 69 | django = ["Django (>=1.11)"] 70 | dynamodb = ["boto3 (>=1.9.178)"] 71 | elasticsearch = ["elasticsearch"] 72 | eventlet = ["eventlet (>=0.26.1)"] 73 | gevent = ["gevent (>=1.0.0)"] 74 | librabbitmq = ["librabbitmq (>=1.5.0)"] 75 | lzma = ["backports.lzma"] 76 | memcache = ["pylibmc"] 77 | mongodb = ["pymongo[srv] (>=3.3.0)"] 78 | msgpack = ["msgpack"] 79 | pymemcache = ["python-memcached"] 80 | pyro = ["pyro4"] 81 | redis = ["redis (>=3.2.0)"] 82 | s3 = ["boto3 (>=1.9.125)"] 83 | slmq = ["softlayer-messaging (>=1.0.3)"] 84 | solar = ["ephem"] 85 | sqlalchemy = ["sqlalchemy"] 86 | sqs = ["boto3 (>=1.9.125)", "pycurl (7.43.0.5)"] 87 | tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] 88 | yaml = ["PyYAML (>=3.10)"] 89 | zookeeper = ["kazoo (>=1.3.1)"] 90 | zstd = ["zstandard"] 91 | 92 | [[package]] 93 | name = "click" 94 | version = "7.1.2" 95 | description = "Composable command line interface toolkit" 96 | category = "main" 97 | optional = false 98 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 99 | 100 | [[package]] 101 | name = "click-didyoumean" 102 | version = "0.0.3" 103 | description = "Enable git-like did-you-mean feature in click." 
104 | category = "main" 105 | optional = false 106 | python-versions = "*" 107 | 108 | [package.dependencies] 109 | click = "*" 110 | 111 | [[package]] 112 | name = "click-repl" 113 | version = "0.1.6" 114 | description = "REPL plugin for Click" 115 | category = "main" 116 | optional = false 117 | python-versions = "*" 118 | 119 | [package.dependencies] 120 | click = "*" 121 | prompt-toolkit = "*" 122 | six = "*" 123 | 124 | [[package]] 125 | name = "colorama" 126 | version = "0.4.4" 127 | description = "Cross-platform colored terminal text." 128 | category = "dev" 129 | optional = false 130 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 131 | 132 | [[package]] 133 | name = "coverage" 134 | version = "5.3" 135 | description = "Code coverage measurement for Python" 136 | category = "dev" 137 | optional = false 138 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 139 | 140 | [package.extras] 141 | toml = ["toml"] 142 | 143 | [[package]] 144 | name = "importlib-metadata" 145 | version = "2.0.0" 146 | description = "Read metadata from Python packages" 147 | category = "main" 148 | optional = false 149 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 150 | 151 | [package.dependencies] 152 | zipp = ">=0.5" 153 | 154 | [package.extras] 155 | docs = ["sphinx", "rst.linker"] 156 | testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] 157 | 158 | [[package]] 159 | name = "iniconfig" 160 | version = "1.0.1" 161 | description = "iniconfig: brain-dead simple config-ini parsing" 162 | category = "dev" 163 | optional = false 164 | python-versions = "*" 165 | 166 | [[package]] 167 | name = "kombu" 168 | version = "5.0.2" 169 | description = "Messaging library for Python." 
170 | category = "main" 171 | optional = false 172 | python-versions = ">=3.6" 173 | 174 | [package.dependencies] 175 | amqp = ">=5.0.0,<6.0.0" 176 | importlib-metadata = {version = ">=0.18", markers = "python_version < \"3.8\""} 177 | 178 | [package.extras] 179 | azureservicebus = ["azure-servicebus (>=0.21.1)"] 180 | azurestoragequeues = ["azure-storage-queue"] 181 | consul = ["python-consul (>=0.6.0)"] 182 | librabbitmq = ["librabbitmq (>=1.5.2)"] 183 | mongodb = ["pymongo (>=3.3.0)"] 184 | msgpack = ["msgpack"] 185 | pyro = ["pyro4"] 186 | qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] 187 | redis = ["redis (>=3.3.11)"] 188 | slmq = ["softlayer-messaging (>=1.0.3)"] 189 | sqlalchemy = ["sqlalchemy"] 190 | sqs = ["boto3 (>=1.4.4)", "pycurl (7.43.0.2)"] 191 | yaml = ["PyYAML (>=3.10)"] 192 | zookeeper = ["kazoo (>=1.3.1)"] 193 | 194 | [[package]] 195 | name = "packaging" 196 | version = "20.4" 197 | description = "Core utilities for Python packages" 198 | category = "dev" 199 | optional = false 200 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 201 | 202 | [package.dependencies] 203 | pyparsing = ">=2.0.2" 204 | six = "*" 205 | 206 | [[package]] 207 | name = "pluggy" 208 | version = "0.13.1" 209 | description = "plugin and hook calling mechanisms for python" 210 | category = "dev" 211 | optional = false 212 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 213 | 214 | [package.dependencies] 215 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 216 | 217 | [package.extras] 218 | dev = ["pre-commit", "tox"] 219 | 220 | [[package]] 221 | name = "prompt-toolkit" 222 | version = "3.0.3" 223 | description = "Library for building powerful interactive command lines in Python" 224 | category = "main" 225 | optional = false 226 | python-versions = ">=3.6" 227 | 228 | [package.dependencies] 229 | wcwidth = "*" 230 | 231 | [[package]] 232 | name = "py" 233 | version = "1.9.0" 234 | description = "library 
with cross-python path, ini-parsing, io, code, log facilities" 235 | category = "dev" 236 | optional = false 237 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 238 | 239 | [[package]] 240 | name = "pyparsing" 241 | version = "2.4.7" 242 | description = "Python parsing module" 243 | category = "dev" 244 | optional = false 245 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 246 | 247 | [[package]] 248 | name = "pytest" 249 | version = "6.1.1" 250 | description = "pytest: simple powerful testing with Python" 251 | category = "dev" 252 | optional = false 253 | python-versions = ">=3.5" 254 | 255 | [package.dependencies] 256 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 257 | attrs = ">=17.4.0" 258 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 259 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 260 | iniconfig = "*" 261 | packaging = "*" 262 | pluggy = ">=0.12,<1.0" 263 | py = ">=1.8.2" 264 | toml = "*" 265 | 266 | [package.extras] 267 | checkqa_mypy = ["mypy (0.780)"] 268 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 269 | 270 | [[package]] 271 | name = "pytest-celery" 272 | version = "0.0.0a1" 273 | description = "pytest-celery a shim pytest plugin to enable celery.contrib.pytest" 274 | category = "dev" 275 | optional = false 276 | python-versions = "*" 277 | 278 | [package.dependencies] 279 | celery = ">=4.4.0" 280 | 281 | [[package]] 282 | name = "pytest-cov" 283 | version = "2.10.1" 284 | description = "Pytest plugin for measuring coverage." 
285 | category = "dev" 286 | optional = false 287 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 288 | 289 | [package.dependencies] 290 | coverage = ">=4.4" 291 | pytest = ">=4.6" 292 | 293 | [package.extras] 294 | testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] 295 | 296 | [[package]] 297 | name = "pytz" 298 | version = "2020.1" 299 | description = "World timezone definitions, modern and historical" 300 | category = "main" 301 | optional = false 302 | python-versions = "*" 303 | 304 | [[package]] 305 | name = "redis" 306 | version = "3.5.3" 307 | description = "Python client for Redis key-value store" 308 | category = "main" 309 | optional = false 310 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 311 | 312 | [package.extras] 313 | hiredis = ["hiredis (>=0.1.3)"] 314 | 315 | [[package]] 316 | name = "six" 317 | version = "1.15.0" 318 | description = "Python 2 and 3 compatibility utilities" 319 | category = "main" 320 | optional = false 321 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 322 | 323 | [[package]] 324 | name = "toml" 325 | version = "0.10.1" 326 | description = "Python Library for Tom's Obvious, Minimal Language" 327 | category = "dev" 328 | optional = false 329 | python-versions = "*" 330 | 331 | [[package]] 332 | name = "vine" 333 | version = "5.0.0" 334 | description = "Promises, promises, promises." 
335 | category = "main" 336 | optional = false 337 | python-versions = ">=3.6" 338 | 339 | [[package]] 340 | name = "wcwidth" 341 | version = "0.2.5" 342 | description = "Measures the displayed width of unicode strings in a terminal" 343 | category = "main" 344 | optional = false 345 | python-versions = "*" 346 | 347 | [[package]] 348 | name = "zipp" 349 | version = "3.3.0" 350 | description = "Backport of pathlib-compatible object wrapper for zip files" 351 | category = "main" 352 | optional = false 353 | python-versions = ">=3.6" 354 | 355 | [package.extras] 356 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 357 | testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] 358 | 359 | [metadata] 360 | lock-version = "1.1" 361 | python-versions = "^3.6" 362 | content-hash = "a7763747bbc4f48cd1c97d4710b2b5cfa37cd55741122434d3d7da5ae6a5e749" 363 | 364 | [metadata.files] 365 | amqp = [ 366 | {file = "amqp-5.0.1-py2.py3-none-any.whl", hash = "sha256:a8fb8151eb9d12204c9f1784c0da920476077609fa0a70f2468001e3a4258484"}, 367 | {file = "amqp-5.0.1.tar.gz", hash = "sha256:9881f8e6fe23e3db9faa6cfd8c05390213e1d1b95c0162bc50552cad75bffa5f"}, 368 | ] 369 | atomicwrites = [ 370 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 371 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 372 | ] 373 | attrs = [ 374 | {file = "attrs-20.2.0-py2.py3-none-any.whl", hash = "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc"}, 375 | {file = "attrs-20.2.0.tar.gz", hash = "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594"}, 376 | ] 377 | billiard = [ 378 | {file = "billiard-3.6.3.0-py3-none-any.whl", hash = 
"sha256:bff575450859a6e0fbc2f9877d9b715b0bbc07c3565bb7ed2280526a0cdf5ede"}, 379 | {file = "billiard-3.6.3.0.tar.gz", hash = "sha256:d91725ce6425f33a97dfa72fb6bfef0e47d4652acd98a032bd1a7fbf06d5fa6a"}, 380 | ] 381 | celery = [ 382 | {file = "celery-5.0.0-py3-none-any.whl", hash = "sha256:72138dc3887f68dc58e1a2397e477256f80f1894c69fa4337f8ed70be460375b"}, 383 | {file = "celery-5.0.0.tar.gz", hash = "sha256:313930fddde703d8e37029a304bf91429cd11aeef63c57de6daca9d958e1f255"}, 384 | ] 385 | click = [ 386 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, 387 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, 388 | ] 389 | click-didyoumean = [ 390 | {file = "click-didyoumean-0.0.3.tar.gz", hash = "sha256:112229485c9704ff51362fe34b2d4f0b12fc71cc20f6d2b3afabed4b8bfa6aeb"}, 391 | ] 392 | click-repl = [ 393 | {file = "click-repl-0.1.6.tar.gz", hash = "sha256:b9f29d52abc4d6059f8e276132a111ab8d94980afe6a5432b9d996544afa95d5"}, 394 | {file = "click_repl-0.1.6-py3-none-any.whl", hash = "sha256:9c4c3d022789cae912aad8a3f5e1d7c2cdd016ee1225b5212ad3e8691563cda5"}, 395 | ] 396 | colorama = [ 397 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 398 | ] 399 | coverage = [ 400 | {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, 401 | {file = "coverage-5.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4"}, 402 | {file = "coverage-5.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9"}, 403 | {file = "coverage-5.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729"}, 404 | {file = 
"coverage-5.3-cp27-cp27m-win32.whl", hash = "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d"}, 405 | {file = "coverage-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418"}, 406 | {file = "coverage-5.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9"}, 407 | {file = "coverage-5.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5"}, 408 | {file = "coverage-5.3-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822"}, 409 | {file = "coverage-5.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097"}, 410 | {file = "coverage-5.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9"}, 411 | {file = "coverage-5.3-cp35-cp35m-win32.whl", hash = "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636"}, 412 | {file = "coverage-5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f"}, 413 | {file = "coverage-5.3-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237"}, 414 | {file = "coverage-5.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54"}, 415 | {file = "coverage-5.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7"}, 416 | {file = "coverage-5.3-cp36-cp36m-win32.whl", hash = "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a"}, 417 | {file = "coverage-5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d"}, 418 | {file = 
"coverage-5.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"}, 419 | {file = "coverage-5.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f"}, 420 | {file = "coverage-5.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c"}, 421 | {file = "coverage-5.3-cp37-cp37m-win32.whl", hash = "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751"}, 422 | {file = "coverage-5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709"}, 423 | {file = "coverage-5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516"}, 424 | {file = "coverage-5.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f"}, 425 | {file = "coverage-5.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259"}, 426 | {file = "coverage-5.3-cp38-cp38-win32.whl", hash = "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82"}, 427 | {file = "coverage-5.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221"}, 428 | {file = "coverage-5.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978"}, 429 | {file = "coverage-5.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21"}, 430 | {file = "coverage-5.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24"}, 431 | {file = "coverage-5.3-cp39-cp39-win32.whl", hash = "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7"}, 432 | {file = 
"coverage-5.3-cp39-cp39-win_amd64.whl", hash = "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7"}, 433 | {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, 434 | ] 435 | importlib-metadata = [ 436 | {file = "importlib_metadata-2.0.0-py2.py3-none-any.whl", hash = "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"}, 437 | {file = "importlib_metadata-2.0.0.tar.gz", hash = "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da"}, 438 | ] 439 | iniconfig = [ 440 | {file = "iniconfig-1.0.1-py3-none-any.whl", hash = "sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437"}, 441 | {file = "iniconfig-1.0.1.tar.gz", hash = "sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"}, 442 | ] 443 | kombu = [ 444 | {file = "kombu-5.0.2-py2.py3-none-any.whl", hash = "sha256:6dc509178ac4269b0e66ab4881f70a2035c33d3a622e20585f965986a5182006"}, 445 | {file = "kombu-5.0.2.tar.gz", hash = "sha256:f4965fba0a4718d47d470beeb5d6446e3357a62402b16c510b6a2f251e05ac3c"}, 446 | ] 447 | packaging = [ 448 | {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, 449 | {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, 450 | ] 451 | pluggy = [ 452 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 453 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 454 | ] 455 | prompt-toolkit = [ 456 | {file = "prompt_toolkit-3.0.3-py3-none-any.whl", hash = "sha256:c93e53af97f630f12f5f62a3274e79527936ed466f038953dfa379d4941f651a"}, 457 | {file = "prompt_toolkit-3.0.3.tar.gz", hash = "sha256:a402e9bf468b63314e37460b68ba68243d55b2f8c4d0192f85a019af3945050e"}, 458 | ] 459 | 
py = [ 460 | {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, 461 | {file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, 462 | ] 463 | pyparsing = [ 464 | {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, 465 | {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, 466 | ] 467 | pytest = [ 468 | {file = "pytest-6.1.1-py3-none-any.whl", hash = "sha256:7a8190790c17d79a11f847fba0b004ee9a8122582ebff4729a082c109e81a4c9"}, 469 | {file = "pytest-6.1.1.tar.gz", hash = "sha256:8f593023c1a0f916110285b6efd7f99db07d59546e3d8c36fc60e2ab05d3be92"}, 470 | ] 471 | pytest-celery = [ 472 | {file = "pytest-celery-0.0.0a1.tar.gz", hash = "sha256:3e0e0817c2d3f2870dafebd915bf13100fc12920b5d42dfe5fdc35844fe42e62"}, 473 | {file = "pytest_celery-0.0.0a1-py2.py3-none-any.whl", hash = "sha256:2fa8d0ae0d573fb2ee51902bfa220e891044eafadbfb132b28b7087295c3004f"}, 474 | ] 475 | pytest-cov = [ 476 | {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, 477 | {file = "pytest_cov-2.10.1-py2.py3-none-any.whl", hash = "sha256:45ec2d5182f89a81fc3eb29e3d1ed3113b9e9a873bcddb2a71faaab066110191"}, 478 | ] 479 | pytz = [ 480 | {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"}, 481 | {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"}, 482 | ] 483 | redis = [ 484 | {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, 485 | {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, 486 | ] 487 | six = [ 488 | {file = 
"six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, 489 | {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, 490 | ] 491 | toml = [ 492 | {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, 493 | {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, 494 | ] 495 | vine = [ 496 | {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, 497 | {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, 498 | ] 499 | wcwidth = [ 500 | {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, 501 | {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, 502 | ] 503 | zipp = [ 504 | {file = "zipp-3.3.0-py3-none-any.whl", hash = "sha256:eed8ec0b8d1416b2ca33516a37a08892442f3954dee131e92cfd92d8fe3e7066"}, 505 | {file = "zipp-3.3.0.tar.gz", hash = "sha256:64ad89efee774d1897a58607895d80789c59778ea02185dd846ac38394a8642b"}, 506 | ] 507 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "celery-singleton" 3 | version = "0.3.1" 4 | description = "Prevent duplicate celery tasks" 5 | authors = ["Steinthor Palsson "] 6 | homepage = 'https://github.com/steinitzu/celery-singleton' 7 | repository = 'https://github.com/steinitzu/celery-singleton' 8 | license = "MIT" 9 | 10 | [tool.poetry.dependencies] 11 | python = "^3.6" 12 | celery = ">=4" 13 | redis = "*" 14 | 15 | [tool.poetry.dev-dependencies] 16 | pytest = "*" 17 | 
pytest-cov = "*" 18 | pytest-celery = "*" 19 | 20 | [build-system] 21 | requires = ["poetry>=0.12"] 22 | build-backend = "poetry.masonry.api" 23 | -------------------------------------------------------------------------------- /sample/sample.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from celery import Celery 4 | from celery.signals import celeryd_init 5 | from celery_singleton import Singleton 6 | from celery_singleton.singleton import clear_locks 7 | 8 | 9 | celery_app = Celery( 10 | __name__, 11 | broker='redis://localhost:6379', 12 | backend='redis://localhost:6379' 13 | ) 14 | 15 | @celeryd_init.connect() 16 | def clear_all_locks(**kwargs): 17 | clear_locks(celery_app) 18 | 19 | 20 | @celery_app.task(bind=True, name='lazy_return', base=Singleton) 21 | def lazy_return(self, *args, **kwargs): 22 | print('running task') 23 | time.sleep(5) 24 | print('returning') 25 | return args, kwargs 26 | 27 | 28 | if __name__ == '__main__': 29 | task1 = lazy_return.delay(1, 2, 3, key='abc') 30 | task2 = lazy_return.delay(1, 2, 3, key='abc') 31 | task3 = lazy_return.delay(3, 4, 5, key='abc') 32 | 33 | print(task1) 34 | print(task2) 35 | print(task3) 36 | assert task1 == task2 37 | assert task1 != task3 38 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | addopts=--cov celery_singleton --no-cov-on-fail -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | 4 | 5 | @pytest.fixture(scope="session") 6 | def redis_url(): 7 | return os.environ.get( 8 | "CELERY_SINGLETON_TEST_REDIS_URL", "redis://localhost" 9 | ) 10 | -------------------------------------------------------------------------------- 
/tests/test_backends.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from contextlib import contextmanager 3 | 4 | from uuid import uuid4 5 | from hashlib import md5 6 | from celery_singleton.backends.redis import RedisBackend 7 | from celery_singleton.backends import get_backend 8 | from celery_singleton import backends 9 | 10 | 11 | def random_hash(): 12 | return "SINGLETON_TEST_KEY_PREFIX_" + md5(uuid4().bytes).hexdigest() 13 | 14 | 15 | def random_task_id(): 16 | return str(uuid4()) 17 | 18 | 19 | def clear_locks(backend): 20 | backend.clear("SINGLETON_TEST_KEY_PREFIX_") 21 | 22 | 23 | @pytest.fixture 24 | @contextmanager 25 | def backend(redis_url): 26 | backend = RedisBackend(redis_url) 27 | try: 28 | yield backend 29 | finally: 30 | backend.redis.flushall() 31 | backends._backend = None 32 | 33 | 34 | class TestLock: 35 | def test__new_lock__is_set(self, backend): 36 | with backend as b: 37 | lock = random_hash() 38 | task_id = random_task_id() 39 | 40 | b.lock(lock, task_id) 41 | 42 | assert b.redis.get(lock) == task_id 43 | 44 | def test__new_lock__returns_true(self, backend): 45 | with backend as b: 46 | lock = random_hash() 47 | task_id = random_task_id() 48 | 49 | assert b.lock(lock, task_id) is True 50 | 51 | def test__lock_exists__is_not_set(self, backend): 52 | with backend as b: 53 | lock = random_hash() 54 | task_id = random_task_id() 55 | 56 | b.lock(lock, task_id) 57 | task_id2 = random_task_id() 58 | 59 | b.lock(lock, task_id2) 60 | 61 | assert b.redis.get(lock) == task_id and b.redis.get(lock) != task_id2 62 | 63 | def test__lock_exists__returns_false(self, backend): 64 | with backend as b: 65 | lock = random_hash() 66 | task_id = random_task_id() 67 | 68 | b.lock(lock, task_id) 69 | task_id2 = random_task_id() 70 | 71 | assert b.lock(lock, task_id2) is False 72 | 73 | 74 | class TestUnlock: 75 | def test__unlock__deletes_key(self, backend): 76 | with backend as b: 77 | lock = 
random_hash() 78 | task_id = random_task_id() 79 | 80 | b.lock(lock, task_id) 81 | b.unlock(lock) 82 | 83 | assert b.redis.get(lock) is None 84 | 85 | 86 | class TestClear: 87 | def test__clear_locks__all_gone(self, backend): 88 | with backend as b: 89 | locks = [random_hash() for i in range(10)] 90 | values = [random_task_id() for i in range(10)] 91 | 92 | for lock, value in zip(locks, values): 93 | b.lock(lock, value) 94 | 95 | b.clear("SINGLETON_TEST_KEY_PREFIX_") 96 | 97 | for lock in locks: 98 | assert b.get(lock) is None 99 | 100 | 101 | class FakeBackend: 102 | def __init__(self, *args, **kwargs): 103 | self.args = args 104 | self.kwargs = kwargs 105 | 106 | 107 | @pytest.fixture(scope="function") 108 | def fake_config(): 109 | class FakeConfig: 110 | backend_url = "redis://localhost" 111 | backend_kwargs = {} 112 | backend_class = FakeBackend 113 | 114 | try: 115 | yield FakeConfig() 116 | finally: 117 | backends._backend = None 118 | 119 | 120 | class TestGetBackend: 121 | def test__correct_class(self, fake_config): 122 | backend = get_backend(fake_config) 123 | assert isinstance(backend, fake_config.backend_class) 124 | 125 | def test__receives_kwargs(self, fake_config): 126 | kwargs = dict(a=1, b=2, c=3) 127 | fake_config.backend_kwargs = kwargs 128 | 129 | backend = get_backend(fake_config) 130 | 131 | assert backend.kwargs == kwargs 132 | 133 | def test__receives_url_as_first_arg(self, fake_config): 134 | fake_config.backend_url = "test backend url" 135 | 136 | backend = get_backend(fake_config) 137 | 138 | assert backend.args[0] == fake_config.backend_url 139 | 140 | def test__get_backend_twice_returns_same_instance(self, fake_config): 141 | fake_config.backend_url = "test backend url" 142 | 143 | backend = get_backend(fake_config) 144 | backend2 = get_backend(fake_config) 145 | 146 | assert backend is backend2 147 | -------------------------------------------------------------------------------- /tests/test_config.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | import sys 3 | 4 | from celery_singleton.config import Config 5 | from celery_singleton.backends.redis import RedisBackend 6 | 7 | 8 | class TestBackendUrl: 9 | @pytest.mark.celery(result_backend="redis://test_backend_url") 10 | def test__defaults_to_celery_backend_url(self, celery_app): 11 | config = Config(celery_app) 12 | assert config.backend_url == "redis://test_backend_url" 13 | 14 | @pytest.mark.celery(broker_url="redis://test_broker_url") 15 | def test__defaults_to_celery_broker_url(self, celery_app): 16 | config = Config(celery_app) 17 | assert config.backend_url == "redis://test_broker_url" 18 | 19 | @pytest.mark.celery(singleton_backend_url="redis://override_url") 20 | def test__override_url(self, celery_app): 21 | config = Config(celery_app) 22 | assert config.backend_url == "redis://override_url" 23 | 24 | 25 | class FakeBackendModule: 26 | class FakeBackend: 27 | pass 28 | 29 | 30 | class TestBackend: 31 | def test__default_backend_class__redis_backend(self, celery_app): 32 | config = Config(celery_app) 33 | assert config.backend_class == RedisBackend 34 | 35 | @pytest.mark.celery(singleton_backend_class="singleton_backends.fake.FakeBackend") 36 | def test__override_from_string__returns_class(self, celery_app, monkeypatch): 37 | with monkeypatch.context() as monkey: 38 | monkey.setitem(sys.modules, "singleton_backends.fake", FakeBackendModule) 39 | config = Config(celery_app) 40 | assert config.backend_class == FakeBackendModule.FakeBackend 41 | 42 | @pytest.mark.celery(singleton_backend_class=FakeBackendModule.FakeBackend) 43 | def test__override_from_class__returns_class(self, celery_app): 44 | config = Config(celery_app) 45 | assert config.backend_class == FakeBackendModule.FakeBackend 46 | 47 | 48 | class TestKeyPrefix: 49 | def test__default_key_prefix(self, celery_app): 50 | config = Config(celery_app) 51 | assert config.key_prefix == 
# --- tests/test_config.py (tail of file; chunk starts mid-class) ---
# NOTE(review): the class enclosing the first visible test method is cut off
# before this chunk, and its first test (asserting the "SINGLETONLOCK_"
# default prefix) is truncated. The header below is reconstructed -- confirm
# the class name against the full file before merging.


class TestKeyPrefix:  # NOTE(review): assumed name; header not visible in this chunk
    @pytest.mark.celery(singleton_key_prefix="CUSTOM_KEY_PREFIX")
    def test__override_key_prefix(self, celery_app):
        config = Config(celery_app)
        assert config.key_prefix == "CUSTOM_KEY_PREFIX"


class TestRaiseOnDuplicate:
    """Config.raise_on_duplicate mirrors the app setting; default is None."""

    @pytest.mark.celery(singleton_raise_on_duplicate=True)
    def test__true_is_true(self, celery_app):
        config = Config(celery_app)
        assert config.raise_on_duplicate is True

    @pytest.mark.celery(singleton_raise_on_duplicate=False)
    def test__false_is_false(self, celery_app):
        config = Config(celery_app)
        assert config.raise_on_duplicate is False

    def test__default_is_none(self, celery_app):
        config = Config(celery_app)
        assert config.raise_on_duplicate is None


class TestTaskExpiry:
    """Config.lock_expiry mirrors singleton_lock_expiry; default is None."""

    @pytest.mark.celery(singleton_lock_expiry=60)
    def test__has_config_value(self, celery_app):
        config = Config(celery_app)
        assert config.lock_expiry == 60

    def test__default_is_none(self, celery_app):
        config = Config(celery_app)
        assert config.lock_expiry is None


# --- tests/test_singleton.py ---
"""Integration tests for celery_singleton.Singleton task deduplication.

Covers: duplicate suppression, lock lifecycle (success/failure/apply_async
errors), raise_on_duplicate, unique_on filtering, lock expiry, and custom
JSON encoders for lock-key generation.
"""
import json
import random
import time
import uuid
from contextlib import contextmanager
from unittest import mock

import pytest
from celery import Celery
from celery import Task as BaseTask

from celery_singleton import DuplicateTaskError, util
from celery_singleton.backends import get_backend
from celery_singleton.backends.redis import RedisBackend
from celery_singleton.config import Config
from celery_singleton.singleton import Singleton, clear_locks


@pytest.fixture(scope="session")
def celery_config(redis_url):
    """Celery app config pointing broker/results at the test Redis."""
    return {
        "broker_url": redis_url,
        "result_backend": redis_url,
        "singleton_key_prefix": "lock_prefix:",
    }


@pytest.fixture(scope="session")
def celery_enable_logging():
    return True


@pytest.fixture(scope="function")
@contextmanager
def scoped_app(celery_app):
    """Yield the celery app and flush Redis afterwards so locks don't leak
    between tests."""
    backend = get_backend(Config(celery_app))
    try:
        yield celery_app
    finally:
        backend.redis.flushall()


class ExpectedTaskFail(Exception):
    """Marker exception for tests that deliberately fail a task."""

    pass


class TestSimpleTask:
    def test__queue_duplicates__same_id(self, scoped_app):
        # Identical args => identical lock => every apply_async returns the
        # same AsyncResult id.
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            tasks = [simple_task.apply_async(args=[1, 2, 3]) for i in range(10)]
            assert set(tasks) == set([tasks[0]])

    def test__queue_multiple_uniques__different_ids(self, scoped_app):
        # Distinct args => distinct locks => distinct task ids.
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            tasks = [simple_task.apply_async(args=[i, i + 1, i + 2]) for i in range(5)]
            assert len(set(tasks)) == len(tasks)

    def test__queue_duplicate_after_success__different_ids(
        self, scoped_app, celery_worker
    ):
        # After a task succeeds its lock is released, so re-queueing the same
        # args yields a fresh task id.
        with scoped_app:

            @celery_worker.app.task(base=Singleton)
            def simple_task(*args):
                return args

            celery_worker.reload()

            task1 = simple_task.apply_async(args=[1, 2, 3])
            task1.get()
            time.sleep(0.05)  # small delay for on_success
            task2 = simple_task.apply_async(args=[1, 2, 3])
            task2.get()

            assert task1 != task2

    def test__queue_duplicate_after_error__different_ids(
        self, scoped_app, celery_worker
    ):
        # A failed task must also release its lock.
        with scoped_app:

            @celery_worker.app.task(base=Singleton)
            def fails(*args):
                raise ExpectedTaskFail()

            celery_worker.reload()

            task1 = fails.apply_async(args=[1, 2, 3])
            try:
                task1.get()
            except Exception as e:
                # Celery reconstructs the exception on the client side, so the
                # class object may differ; compare by name instead of type.
                assert type(e).__name__ == ExpectedTaskFail.__name__
            time.sleep(0.05)  # small delay for the failure handler to clear the lock
            task2 = fails.apply_async(args=[1, 2, 3])

            assert task1 != task2

    def test__get_existing_task_id(self, scoped_app):
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            lock = simple_task.generate_lock("simple_task", task_args=[1, 2, 3])
            # NOTE: "aquire_lock" (sic) is the library's actual method name.
            simple_task.aquire_lock(lock, "test_task_id")

            task_id = simple_task.get_existing_task_id(lock)

            assert task_id == "test_task_id"

    @mock.patch.object(
        BaseTask, "apply_async", side_effect=Exception("Apply async error")
    )
    def test__apply_async_fails__lock_cleared(self, mock_base, scoped_app):
        # If dispatch itself blows up, the freshly-acquired lock must be
        # released so the task isn't permanently blocked.
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            task_args = [1, 2, 3]
            lock = simple_task.generate_lock("simple_task", task_args=task_args)
            try:
                simple_task.apply_async(args=task_args)
            except Exception:
                pass
            assert simple_task.get_existing_task_id(lock) is None

    @mock.patch.object(
        BaseTask, "apply_async", side_effect=ExpectedTaskFail("Apply async error")
    )
    def test__apply_async_fails__exception_reraised(self, mock_base, scoped_app):
        # The original dispatch error propagates to the caller unchanged.
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            with pytest.raises(ExpectedTaskFail):
                simple_task.apply_async(args=[1, 2, 3])

    def test__raise_on_duplicate__raises_duplicate_error(self, scoped_app):
        with scoped_app as app:

            @app.task(base=Singleton, raise_on_duplicate=True)
            def raise_on_duplicate_task(*args):
                return args

            t1 = raise_on_duplicate_task.delay(1, 2, 3)
            with pytest.raises(DuplicateTaskError) as exinfo:
                raise_on_duplicate_task.delay(1, 2, 3)
            # The error carries the id of the task already holding the lock.
            assert exinfo.value.task_id == t1.task_id


class TestClearLocks:
    def test__clear_locks(self, scoped_app):
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            for i in range(5):
                simple_task.apply_async(args=[i])
            clear_locks(app)

            backend = simple_task.singleton_backend
            config = simple_task.singleton_config

            # No lock keys with the configured prefix may survive.
            assert not backend.redis.keys(config.key_prefix + "*")


class TestUniqueOn:
    """The lock key must be derived only from the args named in unique_on.

    Each test spies on util.generate_lock and checks both calls (producer
    side + release side) received only the unique_on subset, as kwargs.
    """

    @mock.patch.object(
        util, "generate_lock", autospec=True, side_effect=util.generate_lock
    )
    def test__unique_on_pos_arg__lock_on_unique_args_only(
        self, mock_gen, scoped_app, celery_worker
    ):
        with scoped_app:

            @celery_worker.app.task(base=Singleton, unique_on=["a", "c"])
            def unique_on_args_task(a, b, c, d=4):
                return a * b * c * d

            celery_worker.reload()  # So task is registered

            result = unique_on_args_task.delay(2, 3, 4, 5)
            result.get()
            time.sleep(0.05)  # Small delay for on_success

            # Positional args are normalized to kwargs before lock generation.
            expected_args = [
                [
                    (unique_on_args_task.name, [], {"a": 2, "c": 4}),
                    {"key_prefix": unique_on_args_task.singleton_config.key_prefix,
                     "json_encoder_class": unique_on_args_task.singleton_config.json_encoder_class},
                ]
            ] * 2
            assert mock_gen.call_count == 2
            assert [list(a) for a in mock_gen.call_args_list] == expected_args

    @mock.patch.object(
        util, "generate_lock", autospec=True, side_effect=util.generate_lock
    )
    def test__unique_on_kwargs__lock_on_unique_args_only(
        self, mock_gen, scoped_app, celery_worker
    ):
        with scoped_app:

            @celery_worker.app.task(base=Singleton, unique_on=["b", "d"])
            def unique_on_kwargs_task(a, b=2, c=3, d=4):
                return a * b * c * d

            celery_worker.reload()  # So task is registered

            result = unique_on_kwargs_task.delay(2, b=3, c=4, d=5)

            result.get()
            time.sleep(0.05)  # Small delay for on_success

            expected_args = [
                [
                    (unique_on_kwargs_task.name, [], {"b": 3, "d": 5}),
                    {"key_prefix": unique_on_kwargs_task.singleton_config.key_prefix,
                     "json_encoder_class": unique_on_kwargs_task.singleton_config.json_encoder_class},
                ]
            ] * 2
            assert mock_gen.call_count == 2
            assert [list(a) for a in mock_gen.call_args_list] == expected_args

    @mock.patch.object(
        util, "generate_lock", autospec=True, side_effect=util.generate_lock
    )
    def test__unique_on_empty__lock_on_task_name_only(
        self, mock_gen, scoped_app, celery_worker
    ):
        with scoped_app:

            @celery_worker.app.task(base=Singleton, unique_on=[])
            def unique_on_empty_task(a, b=2, c=3, d=4):
                return a * b * c * d

            celery_worker.reload()  # So task is registered

            result = unique_on_empty_task.delay(2, b=3, c=4, d=5)

            result.get()
            time.sleep(0.05)  # Small delay for on_success

            # unique_on=[] => all args ignored; lock depends on task name only.
            expected_args = [
                [
                    (unique_on_empty_task.name, [], {}),
                    {"key_prefix": unique_on_empty_task.singleton_config.key_prefix,
                     "json_encoder_class": unique_on_empty_task.singleton_config.json_encoder_class},
                ]
            ] * 2
            assert mock_gen.call_count == 2
            assert [list(a) for a in mock_gen.call_args_list] == expected_args

    @mock.patch.object(
        util, "generate_lock", autospec=True, side_effect=util.generate_lock
    )
    def test__unique_on_is_string_converts_to_list(
        self, mock_gen, scoped_app, celery_worker
    ):
        with scoped_app:

            @celery_worker.app.task(base=Singleton, unique_on="c")
            def unique_on_string_task(a, b=2, c=3, d=4):
                return a * b * c * d

            celery_worker.reload()  # So task is registered

            result = unique_on_string_task.delay(2, b=3, c=4, d=5)

            result.get()
            time.sleep(0.05)  # Small delay for on_success

            expected_args = [
                [
                    (unique_on_string_task.name, [], {"c": 4}),
                    {"key_prefix": unique_on_string_task.singleton_config.key_prefix,
                     "json_encoder_class": unique_on_string_task.singleton_config.json_encoder_class},
                ]
            ] * 2
            assert mock_gen.call_count == 2
            assert [list(a) for a in mock_gen.call_args_list] == expected_args

    @mock.patch.object(
        util, "generate_lock", autospec=True, side_effect=util.generate_lock
    )
    def test__unique_on_handles_unspecified_default_args(
        self, mock_gen, scoped_app, celery_worker
    ):
        with scoped_app:

            @celery_worker.app.task(base=Singleton, unique_on="d")
            def unique_on_default_task(a, b=2, c=3, d=4):
                return a * b * c * d

            celery_worker.reload()  # So task is registered

            result = unique_on_default_task.delay(2, b=3, c=4)

            result.get()
            time.sleep(0.05)  # Small delay for on_success

            # "d" was not passed, so its declared default (4) is used.
            expected_args = [
                [
                    (unique_on_default_task.name, [], {"d": 4}),
                    {"key_prefix": unique_on_default_task.singleton_config.key_prefix,
                     "json_encoder_class": unique_on_default_task.singleton_config.json_encoder_class},
                ]
            ] * 2
            assert mock_gen.call_count == 2
            assert [list(a) for a in mock_gen.call_args_list] == expected_args


class TestRaiseOnDuplicateConfig:
    """Task-level raise_on_duplicate overrides the app-level setting."""

    def test__default_false(self, scoped_app):
        with scoped_app as app:

            @app.task(base=Singleton)
            def mytask():
                pass

            assert mytask._raise_on_duplicate is False

    def test__task_cfg_overrides_app_cfg(self, celery_config):
        config = dict(celery_config, singleton_raise_on_duplicate=False)

        app = Celery()
        app.config_from_object(config)

        @app.task(base=Singleton, raise_on_duplicate=True)
        def mytask():
            pass

        assert mytask._raise_on_duplicate is True
        assert mytask.singleton_config.raise_on_duplicate is False

    def test__app_cfg_used_when_task_cfg_unset(self, celery_config):
        config = dict(celery_config, singleton_raise_on_duplicate=True)

        app = Celery()
        app.config_from_object(config)

        @app.task(base=Singleton)
        def mytask():
            pass

        assert mytask._raise_on_duplicate is True


class TestLockExpiry:
    @mock.patch.object(RedisBackend, "lock", return_value=True, autospec=True)
    def test__lock_expiry__sent_to_backend(self, mock_lock, scoped_app):
        # The task's lock_expiry option must reach the backend's lock() call.
        with scoped_app as app:

            @app.task(base=Singleton, lock_expiry=60)
            def simple_task(*args):
                return args

            result = simple_task.delay(1, 2, 3)

            lock = simple_task.generate_lock(simple_task.name, task_args=[1, 2, 3])

            mock_lock.assert_called_once_with(
                simple_task.singleton_backend, lock, result.task_id, expiry=60
            )


class MyJSONEncoder(json.JSONEncoder):
    """Encoder that serializes UUIDs as their string form."""

    def default(self, obj):
        if isinstance(obj, uuid.UUID):
            return str(obj)
        # Delegate so unserializable objects raise TypeError, per the
        # json.JSONEncoder.default contract (returning obj would not work).
        return super().default(obj)


@pytest.mark.parametrize(
    "singleton_json_encoder_class",
    [MyJSONEncoder, "tests.test_singleton.MyJSONEncoder"],
    ids=["class", "path"],
)
class TestCustomJSONEncoder:
    """singleton_json_encoder_class accepts a class or a dotted import path."""

    @pytest.fixture(scope="function")
    def celery_config(self, celery_config, singleton_json_encoder_class):
        # Layer the parametrized encoder on top of the session config.
        celery_config = dict(
            celery_config, singleton_json_encoder_class=singleton_json_encoder_class
        )
        yield celery_config

    def test__queue_fails_with_non_string_value(self, scoped_app):
        with scoped_app as app:
            # Disable the custom encoder: raw UUIDs are not JSON serializable.
            app.conf["singleton_json_encoder_class"] = None

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            args = [uuid.uuid4(), uuid.uuid4(), uuid.uuid4()]

            with pytest.raises(
                # Message wording differs between Python versions; the
                # optional quotes around UUID cover both.
                TypeError, match=r"Object of type '?UUID'? is not JSON serializable"
            ):
                [simple_task.apply_async(args=args) for i in range(10)]

    def test__json_queue_duplicates__same_id(self, scoped_app, celery_config):
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            args = [uuid.uuid4(), uuid.uuid4(), uuid.uuid4()]
            tasks = [simple_task.apply_async(args=args) for i in range(10)]
            assert set(tasks) == set([tasks[0]])

    def test__queue_duplicates__same_id__via_marshall(self, scoped_app, celery_config):
        # UUID objects and their str() forms must produce the same lock.
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            args = [uuid.uuid4(), uuid.uuid4(), uuid.uuid4()]

            def random_cast(val):
                return random.choice([str, lambda s: s])(val)

            tasks = [
                simple_task.apply_async(args=[random_cast(arg) for arg in args])
                for i in range(10)
            ]
            assert set(tasks) == set([tasks[0]])

    def test__queue_multiple_uniques__different_ids(self, scoped_app):
        with scoped_app as app:

            @app.task(base=Singleton)
            def simple_task(*args):
                return args

            tasks = [
                simple_task.apply_async(args=[uuid.uuid4(), uuid.uuid4(), uuid.uuid4()])
                for i in range(5)
            ]
            assert len(set(tasks)) == len(tasks)