├── .gitignore
├── .travis.yml
├── BACKEND_GUIDE.rst
├── CONTRIBUTING.rst
├── Dockerfile
├── HISTORY.rst
├── LICENSE
├── MANIFEST.in
├── README.rst
├── celery_once
├── __init__.py
├── backends
│ ├── __init__.py
│ ├── file.py
│ └── redis.py
├── helpers.py
└── tasks.py
├── docker-compose.yml
├── pytest.ini
├── requirements-dev.txt
├── setup.py
├── tests
├── __init__.py
├── backends.py
├── integration
│ ├── __init__.py
│ ├── backends
│ │ ├── __init__.py
│ │ ├── test_file.py
│ │ └── test_redis.py
│ ├── flask_app
│ │ ├── __init__.py
│ │ ├── app.py
│ │ └── test_flask.py
│ └── test_tasks.py
└── unit
│ ├── __init__.py
│ ├── backends
│ ├── __init__.py
│ ├── test_file.py
│ └── test_redis.py
│ ├── test_helpers.py
│ └── test_tasks.py
└── tox.ini
/.gitignore:
--------------------------------------------------------------------------------
1 | # Project specific
2 | example/
3 | Pipfile
4 |
5 | # Byte-compiled / optimized / DLL files
6 | __pycache__/
7 | *.py[cod]
8 | *$py.class
9 |
10 | # C extensions
11 | *.so
12 |
13 | # Distribution / packaging
14 | .Python
15 | build/
16 | develop-eggs/
17 | dist/
18 | downloads/
19 | eggs/
20 | .eggs/
21 | lib/
22 | lib64/
23 | parts/
24 | sdist/
25 | var/
26 | wheels/
27 | *.egg-info/
28 | .installed.cfg
29 | *.egg
30 | MANIFEST
31 |
32 | # PyInstaller
33 | # Usually these files are written by a python script from a template
34 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
35 | *.manifest
36 | *.spec
37 |
38 | # Installer logs
39 | pip-log.txt
40 | pip-delete-this-directory.txt
41 |
42 | # Unit test / coverage reports
43 | htmlcov/
44 | .tox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 |
63 | # Flask stuff:
64 | instance/
65 | .webassets-cache
66 |
67 | # Scrapy stuff:
68 | .scrapy
69 |
70 | # Sphinx documentation
71 | docs/_build/
72 |
73 | # PyBuilder
74 | target/
75 |
76 | # Jupyter Notebook
77 | .ipynb_checkpoints
78 |
79 | # pyenv
80 | .python-version
81 |
82 | # celery beat schedule file
83 | celerybeat-schedule
84 |
85 | # SageMath parsed files
86 | *.sage.py
87 |
88 | # Environments
89 | .env
90 | .venv
91 | env/
92 | venv/
93 | ENV/
94 | env.bak/
95 | venv.bak/
96 |
97 | # Spyder project settings
98 | .spyderproject
99 | .spyproject
100 |
101 | # Rope project settings
102 | .ropeproject
103 |
104 | # mkdocs documentation
105 | /site
106 |
107 | # mypy
108 | .mypy_cache/
109 |
110 | # OSX
111 | *.DS_Store
112 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | services:
3 | - redis-server
4 | python:
5 | - "2.7"
6 | - "3.5"
7 | - "3.6"
8 | install:
9 | - "pip install -e ."
10 | - "pip install -r requirements-dev.txt"
11 | script:
12 | - pytest --cov celery_once --cov-report term-missing
13 | - celery -A tests.integration.flask_app.app.celery worker&
14 | - sleep 5
15 | - pytest -m "framework"
16 | after_success:
17 | - coveralls
18 |
--------------------------------------------------------------------------------
/BACKEND_GUIDE.rst:
--------------------------------------------------------------------------------
1 | Contributing a new backend.
2 | ===========================
3 |
4 | Contributing a new locking backend is greatly appreciated! Each new
5 | backend must implement the following methods…
6 |
7 | .. code:: python
8 |
9 | class Backend(object):
10 | """
11 | Each new backend must implement the following methods,
12 | - __init__
13 | - raise_or_lock
14 | - clear_lock
15 | """
16 | def __init__(self, settings):
17 | pass
18 |
19 | def raise_or_lock(self, key, timeout):
20 | pass
21 |
22 | def clear_lock(self, key):
23 | pass
24 |
25 | ``def raise_or_lock(self, key, timeout)``
26 | -----------------------------------------
27 |
28 | Checks if the task is locked based on the ``key`` argument (str). If
29 | already locked should raise an ``AlreadyQueued`` exception. If not,
30 | locks the task by the key. A ``timeout`` argument (int) can also be
31 | passed in. The key should be cleared after the ``timeout`` (in seconds)
32 | has passed.
33 |
34 | ``def clear_lock(self, key)``
35 | -----------------------------
36 |
37 | Removes the lock based on the ``key`` argument (str). This is called
38 | after a task completes (either successfully, or fails beyond celery’s
39 | retry limit).
40 |
41 | ``def __init__(self, settings)``
42 | --------------------------------
43 |
44 | The ``settings`` argument (dict) is based on the celery once
45 | configuration. This can be used to setup the connection/client to the
46 | backend. Any imports for backend specific modules should happen inside
47 | here.
48 |
49 | The `redis backend`_ is a good example of all of this in practice. If
50 | you’d like to contribute a new backend and still feel unsure how to do
51 | so, feel free to open an issue with any questions.
52 |
53 | .. _redis backend: https://github.com/cameronmaske/celery-once/blob/dc1d679b6b12e2a26fafa6783bed0e54108336ce/celery_once/backends/redis.py#L32
54 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | Contributions are welcome, and they are greatly appreciated! Every
5 | little bit helps, and credit will always be given.
6 |
7 | You can contribute in many ways:
8 |
9 | Types of Contributions
10 | ----------------------
11 |
12 | Report Bugs
13 | ~~~~~~~~~~~
14 |
15 | Report bugs at https://github.com/cameronmaske/celery-once/issues.
16 |
17 | If you are reporting a bug, please include:
18 |
19 | - Your operating system name and version.
20 | - Any details about your local setup that might be helpful in
21 | troubleshooting.
22 | - Detailed steps to reproduce the bug.
23 |
24 | Fix Bugs
25 | ~~~~~~~~
26 |
27 | Look through the GitHub issues for bugs. Anything tagged with “bug” is
28 | open to whoever wants to implement it.
29 |
30 | Write Documentation
31 | ~~~~~~~~~~~~~~~~~~~
32 |
33 | Celery Once could always use more documentation, whether as part of the
34 | README, in docstrings, or even on the web in blog
35 | posts, articles, and such.
36 |
37 | Submit Feedback
38 | ~~~~~~~~~~~~~~~
39 |
40 | The best way to send feedback is to file an issue at
41 | https://github.com/cameronmaske/celery-once/issues.
42 |
43 | If you are proposing a feature:
44 |
45 | - Explain in detail how it would work.
46 | - Keep the scope as narrow as possible, to make it easier to implement.
47 | - Remember that this is a volunteer-driven project, and that
48 | contributions are welcome :)
49 |
50 | Get Started!
51 | ~~~~~~~~~~~~
52 |
53 | Ready to contribute? Here’s how to set up ``celery_once`` for local
54 | development.
55 |
56 | 1. Fork the ``celery_once`` repo on GitHub.
57 | 2. Clone your fork locally::
58 |
59 | $ git clone git@github.com:your\_name\_here/celery-once.git
60 |
61 | 3. Install your local copy into a virtualenv. Assuming you have
62 | virtualenvwrapper installed, this is how you set up your fork for
63 | local development::
64 |
65 | $ mkvirtualenv celery-once
 66 |    $ cd celery-once/ && pip install -e .
67 | $ pip install -r requirements-dev.txt
68 |
69 | 4. Create a branch for local development::
70 |
71 | $ git checkout -b name-of-your-bugfix-or-feature
72 |
73 | Now you can make your changes locally.
74 |
 75 | 5. When you’re done making changes, check your changes using::
76 |
77 | $ py.test tests/
78 |
79 | 6. Commit your changes and push your branch to GitHub::
80 |
81 | $ git add .
 82 |     $ git commit -m "Your detailed description of your changes."
83 | $ git push origin name-of-your-bugfix-or-feature
84 |
85 | 7. Submit a pull request through the GitHub website.
86 |
87 | Pull Request Guidelines
88 | ~~~~~~~~~~~~~~~~~~~~~~~
89 |
90 | Before you submit a pull request, check that it meets these guidelines:
91 |
92 | 1. The pull request should include tests. If you are not sure how to
93 | write the test and could use some guidance, mention that in the PR.
94 | 2. If the pull request adds functionality, the README.md doc should be
95 | updated.
96 | 3. The pull request should work for Python 2.7, 3.5 and 3.6. Check
97 | https://travis-ci.org/cameronmaske/celery-once/pull\_requests and make
98 | sure that the tests pass for all supported Python versions.
99 |
100 | Tips
101 | ~~~~
102 |
103 | To run a subset of tests::
104 |
105 | $ py.test
106 |
107 | To run against all supported python versions (2.7, 3.5 and 3.6)::
108 |
109 | $ tox
110 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:2.7-slim
2 |
3 | WORKDIR /code
4 | COPY . ./
5 | RUN pip install -e .
6 | RUN pip install --no-cache-dir -r requirements-dev.txt
7 |
--------------------------------------------------------------------------------
/HISTORY.rst:
--------------------------------------------------------------------------------
1 | History
2 | =======
3 |
4 | 3.0.1
5 | -----
6 | 2019-08-21
7 |
8 | Behind the scenes, changed how key's are generated from task function and arguments.
9 | No longer uses ``inspect.getcallargs`` (deprecated) and is stored on ``__init__``.
10 | Should fix issues if tasks are wrapped by other libraries (i.e. sentry-python).
11 |
12 | 3.0.0
13 | -----
14 | 2019-05-13
15 |
16 | Fixed an issue where large/long arguments could cause ``OSError Filename too long`` with the file backend (see #96).
17 | Keys generated for file backend, are now hashed and limited to 50 characters in length.
18 | *Due to this, it is not backwards compatible with existing keys from the file backend, so any pending locks from previous version will be ignored.*
19 | The Redis backend is unchanged, and thus fully compatible.
20 |
21 | Credit for fix to @xuhcc.
22 |
23 | 2.1.2
24 | -----
25 | 2019-05-13
26 |
27 | - Add support for ``rediss``. Thanks @gustavoalmeida
28 |
29 | 2.1.1
30 | -----
31 | 2019-05-08
32 |
33 | - Fix an issue with the ``File`` backend (#89) to close file after creation, else unclosed file descriptors eventually lead to an "Too many open files" error. Thanks @xuhcc.
34 |
35 | 2.1.0
36 | -----
37 | 2019-02-25
38 |
39 | - Added ``File`` backend (#84). Credit and thanks to @xuhcc.
40 |
41 | 2.0.1
42 | -----
43 | 2019-02-25
44 |
45 | - Fixed an issue when using ``autoretry_for`` with a task. (#74, #75). Thanks @pkariz.
46 |
47 | 2.0.0
48 | -----
49 |
50 | 2018-04-25
51 |
52 | Major Release:
53 |
54 | This changes the Redis backend to use a SETNX-based lock (RedLock). This should address race conditions that the previous approach had (See: #7, #60).
55 |
56 | *This may not be backwards compatible with existing keys stored in Redis.*
57 | If you are upgrading from `1.0.0`, it may be safer to remove any previous used lock keys (See https://github.com/cameronmaske/celery-once/pull/67#issuecomment-384281438 for instructions).
58 |
59 | Other changes include:
60 |
61 | - Able to run on blocking mode when scheduling tasks with Redis backend. See the README for more details.
62 |
63 | - ``AlreadyQueued`` exception return's countdown seconds as `float` instead of `int`.
64 |
65 | Big thanks to @grjones for his contributions for this patch.
66 |
67 |
68 | 1.3.0
69 | -----
70 |
71 | 2018-04-25
72 |
73 | - Fixed an issue where tasks with autoretry_for got into a locked state (#58). Thanks @andbortnik.
74 |
75 |
76 | 1.2.0
77 | -----
78 |
79 | 2017-06-12
80 |
 81 | - Cache the redis connection, instead of re-instantiating one after each task execution (#34, #47). Thanks @brouberol.
82 |
83 | 1.1.0
84 | -----
85 |
86 | 2017-06-12
87 |
88 | - Exclude test files from package.
89 | - Use relative import to import Redis backend. #52
90 | - Correctly set `default_timeout` from settings. #53 #54 (Thanks @Snake575)
91 |
92 | 1.0.2
93 | -----
94 |
95 | 2017-06-06
96 |
97 | - Fixed an issue where retrying tasks would check for the lock on re-run (and error out). Thanks @lackita for the fix (#37, #48).
98 |
99 |
100 | 1.0.1
101 | -----
102 |
103 | 2017-06-06
104 |
105 | - Added support to connect to Redis over sockets. Thanks @brouberol (#33, #49)
106 |
107 | 1.0.0
108 | -----
109 |
110 | 2017-06-05
111 |
112 | Major release:
113 |
114 | This release contains breaking changes. Please revisit the README for the latest setup instructions.
115 |
116 | - Refactored code to allow for custom backends.
117 | - Bumped official support to celery >= 4.
118 | - Bumped official support to Python 2.7, 3.5 and 3.6.
119 |
120 | 0.1.4
121 | -----
122 |
123 | 2015-07-29
124 |
125 | Bugfixes:
126 |
127 | - Fixed an issue where celery beat would crash on graceful enable tasks (#27).
128 | Thanks @PhilipGarnero!
129 |
130 | 0.1.3
131 | -----
132 |
133 | 2015-07-14
134 |
135 | Features:
136 |
137 | - Added option ``unlock_before_run`` to remove the lock before or after the task's execution. Thanks @jcugat!
138 |
139 | 0.1.2
140 | -----
141 |
142 | 2015-03-15
143 |
144 | Bugfixes:
145 |
146 | - Standardized unicode/string handling for the name of a task when generating lock keys.
147 |
148 | 0.1.1
149 | -----
150 |
151 | 2015-02-26
152 |
153 | Bugfixes:
154 |
155 | - Standardized unicode/string handling for keyword arguments when generating lock keys. #11
156 | - Fixed an issue where self bound task (`bind=true`) would not correctly clear locks. #12
157 |
158 | Thanks to @brouberol for contributions to both!
159 |
160 | 0.1
161 | ---
162 |
163 | - Initial release of PyPI
164 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2015.
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | 1. Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 | 2. Redistributions in binary form must reproduce the above copyright notice,
10 | this list of conditions and the following disclaimer in the documentation
11 | and/or other materials provided with the distribution.
12 |
13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
14 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
15 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
16 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
17 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
18 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
20 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
22 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-exclude tests *
2 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Celery Once
2 | ===========
3 |
4 | |Build Status| |Coverage Status|
5 |
6 | Celery Once allows you to prevent multiple execution and queuing of `celery <http://www.celeryproject.org/>`_ tasks.
7 |
8 | Installation
9 | ============
10 |
11 | Installing ``celery_once`` is simple with pip, just run:
12 |
13 | ::
14 |
15 | pip install -U celery_once
16 |
17 |
18 | Requirements
19 | ============
20 |
21 | * `Celery `__. Built to run with Celery 4.0. Older versions may work, but are not officially supported.
22 |
23 | Usage
24 | =====
25 |
26 | To use ``celery_once``, your tasks need to inherit from an `abstract `_ base task called ``QueueOnce``.
27 |
28 | Once installed, you'll need to configure a few options a ``ONCE`` key in celery's conf.
29 |
30 | .. code:: python
31 |
32 | from celery import Celery
33 | from celery_once import QueueOnce
34 | from time import sleep
35 |
36 | celery = Celery('tasks', broker='amqp://guest@localhost//')
37 | celery.conf.ONCE = {
38 | 'backend': 'celery_once.backends.Redis',
39 | 'settings': {
40 | 'url': 'redis://localhost:6379/0',
41 | 'default_timeout': 60 * 60
42 | }
43 | }
44 |
45 | @celery.task(base=QueueOnce)
46 | def slow_task():
47 | sleep(30)
48 | return "Done!"
49 |
50 |
51 | The exact configuration, depends on which locking backend you want to use. See `Backends`_.
52 |
53 |
54 | Behind the scenes, this overrides ``apply_async`` and ``delay``. It does not affect calling the tasks directly.
55 |
56 | When running the task, ``celery_once`` checks that no lock is in place (against a Redis key).
57 | If it isn't, the task will run as normal. Once the task completes (or ends due to an exception) the lock will clear.
58 | If an attempt is made to run the task again before it completes an ``AlreadyQueued`` exception will be raised.
59 |
60 | .. code-block:: python
61 |
62 | example.delay(10)
63 | example.delay(10)
64 | Traceback (most recent call last):
65 | ..
66 | AlreadyQueued()
67 |
68 | .. code-block:: python
69 |
70 | result = example.apply_async(args=(10))
71 | result = example.apply_async(args=(10))
72 | Traceback (most recent call last):
73 | ..
74 | AlreadyQueued()
75 |
76 |
77 | ``graceful``
78 | ------------
79 |
80 | Optionally, instead of raising an ``AlreadyQueued`` exception, the task can return ``None`` if ``once={'graceful': True}`` is set in the task's `options `__ or when run through ``apply_async``.
81 |
82 | .. code:: python
83 |
84 | from celery_once import AlreadyQueued
85 | # Either catch the exception,
86 | try:
87 | example.delay(10)
88 | except AlreadyQueued:
89 | pass
90 | # Or, handle it gracefully at run time.
91 | result = example.apply(args=(10), once={'graceful': True})
92 | # or by default.
93 | @celery.task(base=QueueOnce, once={'graceful': True})
94 | def slow_task():
95 | sleep(30)
96 | return "Done!"
97 |
98 |
99 | ``keys``
100 | --------
101 |
102 | By default ``celery_once`` creates a lock based on the task's name and its arguments and values.
103 | Take for example, the following task below...
104 |
105 | .. code:: python
106 |
107 | @celery.task(base=QueueOnce)
108 | def slow_add(a, b):
109 | sleep(30)
110 | return a + b
111 |
112 | Running the task with different arguments will default to checking against different locks.
113 |
114 | .. code:: python
115 |
116 | slow_add(1, 1)
117 | slow_add(1, 2)
118 |
119 | If you want to specify locking based on a subset, or no arguments you can adjust the keys ``celery_once`` looks at in the task's `options `_ with ``once={'keys': [..]}``
120 |
121 | .. code:: python
122 |
123 | @celery.task(base=QueueOnce, once={'keys': ['a']})
124 | def slow_add(a, b):
125 | sleep(30)
126 | return a + b
127 |
128 | example.delay(1, 1)
129 | # Checks if any tasks are running with the `a=1`
130 | example.delay(1, 2)
131 | Traceback (most recent call last):
132 | ..
133 | AlreadyQueued()
134 | example.delay(2, 2)
135 |
136 | .. code:: python
137 |
138 | @celery.task(base=QueueOnce, once={'keys': []})
139 | def slow_add(a, b):
140 | sleep(30)
141 | return a + b
142 |
143 | # Will enforce only one task can run, no matter what arguments.
144 | example.delay(1, 1)
145 | example.delay(2, 2)
146 | Traceback (most recent call last):
147 | ..
148 | AlreadyQueued()
149 |
150 |
151 | ``timeout``
152 | -----------
153 | As a fall back, ``celery_once`` will clear a lock after 60 minutes.
154 | This is set globally in Celery's configuration with ``ONCE_DEFAULT_TIMEOUT`` but can be set for individual tasks using...
155 |
156 | .. code:: python
157 |
158 | @celery.task(base=QueueOnce, once={'timeout': 60 * 60 * 10})
159 | def long_running_task():
160 | sleep(60 * 60 * 3)
161 |
162 |
163 | ``unlock_before_run``
164 | ---------------------
165 | By default, the lock is removed after the task has executed (using celery's `after_return `_). This behaviour can be changed setting the task's option ``unlock_before_run``. When set to ``True``, the lock will be removed just before executing the task.
166 |
167 | **Caveats**:
168 | * Any retry of the task won't re-enable the lock!
169 | * This can only be set when defining the task, it cannot be passed dynamically to ``apply_async``
170 |
171 | .. code:: python
172 |
173 | @celery.task(base=QueueOnce, once={'unlock_before_run': True})
174 | def slow_task():
175 | sleep(30)
176 | return "Done!"
177 |
178 |
179 |
180 |
181 | Backends
182 | ========
183 |
184 | Redis Backend
185 | -------------
186 |
187 | Requires:
188 |
189 | * `Redis `_ is used as a distributed locking mechanism. Behind the scenes, it use redis-py's `shared, distributed Lock `_.
190 |
191 | Configuration:
192 |
193 | - ``backend`` - ``celery_once.backends.Redis``
194 |
195 | - ``settings``
196 |
197 | - ``default_timeout`` - how many seconds after a lock has been set before it should automatically timeout (defaults to 3600 seconds, or 1 hour).
198 |
199 | - ``url`` - should point towards a running Redis instance (defaults to ``redis://localhost:6379/0``). See below for the format options supported
200 |
201 | - ``blocking`` (boolean value: default ``False``) - If set to ``True``, scheduling a task (by ``.delay/.apply_async``) will block for X seconds to acquire the lock (see: ``blocking_timeout`` below). If no lock could be acquired after X seconds, will raise an ``AlreadyQueued`` exception. This is a very specific use-case scenario and by default is disabled.
202 |
203 | - ``blocking_timeout`` (int or float value: default ``1``) - How many seconds the task will block trying to acquire the lock, if ``blocking`` is set to ``True``. Setting this to ``None`` set's no timeout (equivalent to infinite seconds).
204 |
205 |
206 |
207 | The URL parser supports three patterns of urls:
208 |
209 | * ``redis://host:port[/db][?options]``: redis over TCP
210 |
211 | * ``rediss://host:port[/db][?options]``: redis over TCP with SSL enabled.
212 |
213 | * ``redis+socket:///path/to/redis.sock[?options]``: redis over a UNIX socket
214 |
215 | The ``options`` query args are mapped to the `StrictRedis `_ keyword args.
216 | Examples:
217 | * ``redis://localhost:6379/1``
218 |
219 | * ``redis://localhost:6379/1?ssl=true``
220 |
221 | * ``rediss://localhost:6379/1``
222 |
223 | * ``redis+socket:///var/run/redis/redis.sock?db=1``
224 |
225 |
226 | Example Configuration:
227 |
228 | Minimal:
229 |
230 | .. code:: python
231 |
232 | celery.conf.ONCE = {
233 | 'backend': 'celery_once.backends.Redis',
234 | 'settings': {
235 | 'url': 'redis://localhost:6379/0',
236 | 'default_timeout': 60 * 60
237 | }
238 | }
239 |
240 |
241 | Advanced:
242 | Scheduling tasks blocks up to 30 seconds trying to acquire a lock before raising an exception.
243 |
244 | .. code:: python
245 |
246 | celery.conf.ONCE = {
247 | 'backend': 'celery_once.backends.Redis',
248 | 'settings': {
249 | 'url': 'redis://localhost:6379/0',
250 | 'default_timeout': 60 * 60,
251 | 'blocking': True,
252 | 'blocking_timeout': 30
253 | }
254 | }
255 |
256 | File Backend
257 | -------------
258 |
259 | Configuration:
260 |
261 | - ``backend`` - ``celery_once.backends.File``
262 |
263 | - ``settings``
264 |
265 | - ``location`` - directory where lock files will be located. Default is temporary directory.
266 |
267 | - ``default_timeout`` - how many seconds after a lock has been set before it should automatically timeout (defaults to 3600 seconds, or 1 hour).
268 |
269 |
270 | Example Configuration:
271 |
272 | .. code:: python
273 |
274 | celery.conf.ONCE = {
275 | 'backend': 'celery_once.backends.File',
276 | 'settings': {
277 | 'location': '/tmp/celery_once',
278 | 'default_timeout': 60 * 60
279 | }
280 | }
281 |
282 |
283 | Flask Integration
284 | ------------------
285 | To avoid ``RuntimeError: Working outside of application context`` errors when using ``celery_once`` with `Flask `_, you need to make the ``QueueOnce`` task base class application context aware.
286 | If you've implemented Celery following the Flask `documentation `_ you can extend it like so.
287 |
288 | .. code:: python
289 |
290 | def make_celery(app):
291 | celery = Celery(
292 | app.import_name,
293 | backend=app.config['CELERY_RESULT_BACKEND'],
294 | broker=app.config['CELERY_BROKER_URL']
295 | )
296 | celery.conf.update(app.config)
297 |
298 | class ContextTask(celery.Task):
299 | def __call__(self, *args, **kwargs):
300 | with app.app_context():
301 | return self.run(*args, **kwargs)
302 | celery.Task = ContextTask
303 |
304 | # Make QueueOnce app context aware.
305 | class ContextQueueOnce(QueueOnce):
306 | def __call__(self, *args, **kwargs):
307 | with app.app_context():
308 | return super(ContextQueueOnce, self).__call__(*args, **kwargs)
309 |
310 | # Attach to celery object for easy access.
311 | celery.QueueOnce = ContextQueueOnce
312 | return celery
313 |
314 |
315 | Now, when instead of importing the ``QueueOnce`` base, you can use the context aware base on the ``celery`` object.
316 |
317 | .. code:: python
318 |
319 | celery = make_celery(app)
320 |
321 | @celery.task(base=celery.QueueOnce)
322 | def example_task(value):
323 | return
324 |
325 |
326 | Custom Backend
327 | --------------
328 |
329 | If you want to implement a custom locking backend, see `BACKEND\_GUIDE.rst`_.
330 |
331 | .. _BACKEND\_GUIDE.rst: BACKEND_GUIDE.rst
332 |
333 | Support
334 | =======
335 |
336 | * Tests are run against Python 2.7, 3.5 and 3.6. Other versions may work, but are not officially supported.
337 |
338 | Contributing
339 | ============
340 |
341 | Contributions are welcome, and they are greatly appreciated! See `contributing
342 | guide `_ for more details.
343 |
344 |
345 | .. |Build Status| image:: https://travis-ci.org/cameronmaske/celery-once.svg
346 | :target: https://travis-ci.org/cameronmaske/celery-once
347 | .. |Coverage Status| image:: https://coveralls.io/repos/cameronmaske/celery-once/badge.svg
348 | :target: https://coveralls.io/r/cameronmaske/celery-once
349 |
--------------------------------------------------------------------------------
/celery_once/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | __author__ = 'Cameron Maske'
4 | __email__ = 'cameronmaske@gmail.com'
5 | __version__ = '3.0.1'
6 |
7 |
8 | from .tasks import QueueOnce, AlreadyQueued
9 |
--------------------------------------------------------------------------------
/celery_once/backends/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from .file import File
4 | from .redis import Redis
5 |
--------------------------------------------------------------------------------
/celery_once/backends/file.py:
--------------------------------------------------------------------------------
1 | """
2 | Definition of the file locking backend.
3 | """
4 | import hashlib
5 | import errno
6 | import os
7 | import tempfile
8 | import time
9 |
10 | import six
11 |
12 | from celery_once.tasks import AlreadyQueued
13 |
14 |
def key_to_lock_name(key):
    """
    Turn a lock key into a filesystem-safe, bounded-length file name.

    The name is a (possibly truncated) prefix of the key joined with the
    key's md5 hex digest, capped at 50 characters overall so very long
    keys cannot produce over-long filenames.
    """
    max_length = 50
    digest = hashlib.md5(six.b(key)).hexdigest()
    prefix = key[:max_length - len(digest) - 1]
    return '{}_{}'.format(prefix, digest)
23 |
24 |
class File(object):
    """
    File based locking backend.

    A lock is a plain marker file inside ``location``; it is considered
    held while the file exists and its mtime is younger than the timeout.
    """
    def __init__(self, settings):
        # Fall back to <tempdir>/celery_once when no location is configured.
        location = settings.get('location')
        if location is None:
            location = os.path.join(tempfile.gettempdir(), 'celery_once')
        self.location = location
        try:
            os.makedirs(self.location)
        except OSError as error:
            # An already-existing directory is fine; anything else is not.
            if error.errno != errno.EEXIST:
                raise

    def _get_lock_path(self, key):
        # The key is hashed/truncated so the filename stays a sane length.
        return os.path.join(self.location, key_to_lock_name(key))

    def raise_or_lock(self, key, timeout):
        """
        Acquire the lock for ``key`` or raise ``AlreadyQueued``.

        Creation is atomic via O_CREAT|O_EXCL. If the marker file already
        exists but is older than ``timeout`` seconds, the lock is treated
        as stale and re-claimed by refreshing its mtime.
        """
        lock_path = self._get_lock_path(key)
        try:
            fd = os.open(lock_path, os.O_CREAT | os.O_EXCL)
        except OSError as error:
            if error.errno != errno.EEXIST:
                # Unexpected failure - propagate.
                raise
            # Lock file exists; decide whether it is still live.
            remaining = os.path.getmtime(lock_path) + timeout - time.time()
            if remaining > 0:
                raise AlreadyQueued(remaining)
            # Stale lock: take it over by bumping the mtime.
            os.utime(lock_path, None)
        else:
            os.close(fd)

    def clear_lock(self, key):
        """Release the lock by deleting its marker file."""
        os.remove(self._get_lock_path(key))
77 |
--------------------------------------------------------------------------------
/celery_once/backends/redis.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """Definition of the redis locking backend."""
4 |
5 | from __future__ import absolute_import
6 |
7 |
try:
    # Python 2
    from urlparse import urlparse, parse_qsl
except ImportError:
    # Python 3 moved these into urllib.parse. Catching ImportError
    # specifically (rather than a bare "except:") avoids swallowing
    # unrelated exceptions such as KeyboardInterrupt or SystemExit.
    from urllib.parse import urlparse, parse_qsl
13 |
14 | from celery_once.tasks import AlreadyQueued
15 |
16 |
def parse_url(url):
    """
    Parse a redis connection ``url`` into StrictRedis keyword arguments.

    Three url patterns are supported:

    * redis://host:port[/db][?options]
    * rediss://host:port[/db][?options]  (TCP with SSL)
    * redis+socket:///path/to/redis.sock[?options]

    Raises ValueError for any other scheme.
    """
    parsed = urlparse(url)
    query_args = parse_qsl(parsed.query)

    if parsed.scheme == 'redis+socket':
        # Unix domain socket connection.
        details = {'unix_socket_path': parsed.path}
    elif parsed.scheme in ('redis', 'rediss'):
        # TCP connection (optionally with SSL for the rediss scheme).
        details = {'host': parsed.hostname}
        if parsed.port:
            details['port'] = parsed.port
        if parsed.password:
            details['password'] = parsed.password
        db = parsed.path.lstrip('/')
        if db and db.isdigit():
            details['db'] = db
        if parsed.scheme == 'rediss':
            details['ssl'] = True
    else:
        raise ValueError('Unsupported protocol %s' % (parsed.scheme))

    # Merge query-string options, coercing the ones redis-py expects typed.
    details.update(query_args)
    if 'socket_timeout' in details:
        details['socket_timeout'] = float(details['socket_timeout'])
    if 'db' in details:
        details['db'] = int(details['db'])

    return details
58 |
59 |
# Cached StrictRedis client; created lazily by get_redis() on first use and
# shared across calls so each task execution reuses the same connection.
redis = None

# Fail fast at import time if redis-py is missing, since the Lock class is
# required by this backend.
try:
    from redis.lock import Lock
except ImportError:
    raise ImportError(
        "You need to install the redis library in order to use Redis"
        " backend (pip install redis)")
68 |
69 |
def get_redis(settings):
    """
    Return the module-level StrictRedis client, creating it on first call.

    The client is built from the ``url`` entry in ``settings`` and cached
    in the module-global ``redis`` so subsequent calls reuse the same
    connection.
    """
    global redis
    if not redis:
        try:
            from redis import StrictRedis
        except ImportError:
            raise ImportError(
                "You need to install the redis library in order to use Redis"
                " backend (pip install redis)")
        connection_kwargs = parse_url(settings['url'])
        redis = StrictRedis(**connection_kwargs)
    return redis
81 |
82 |
class Redis(object):
    """Locking backend backed by a shared redis server."""

    def __init__(self, settings):
        self._redis = get_redis(settings)
        self.blocking_timeout = settings.get("blocking_timeout", 1)
        self.blocking = settings.get("blocking", False)

    @property
    def redis(self):
        # Indirection kept so tests can easily patch/mock the client.
        return self._redis

    def raise_or_lock(self, key, timeout):
        """
        Acquire a redis lock for ``key`` or raise ``AlreadyQueued``.

        Uses redis-py's distributed ``Lock``. The key expires on its own
        after ``timeout`` seconds, so a crashed worker cannot hold the lock
        forever. When configured as blocking, acquisition waits up to
        ``blocking_timeout`` seconds before giving up.
        """
        lock = Lock(
            self.redis,
            key,
            timeout=timeout,
            blocking=self.blocking,
            blocking_timeout=self.blocking_timeout
        )
        if not lock.acquire():
            # pttl reports the remaining time-to-live in milliseconds.
            # https://redis.io/commands/pttl
            raise AlreadyQueued(self.redis.pttl(key) / 1000.)

    def clear_lock(self, key):
        """Delete the lock key from redis."""
        return self.redis.delete(key)
119 |
--------------------------------------------------------------------------------
/celery_once/helpers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """Definition of helper functions."""
4 |
5 | import operator
6 | import six
7 | import importlib
8 | from collections import OrderedDict
9 | from time import time
10 |
11 |
def import_backend(config):
    """
    Import the backend class named by ``config['backend']`` (a dotted
    path) and instantiate it with ``config['settings']``.
    """
    dotted = config['backend'].split('.')
    module_name = '.'.join(dotted[:-1])
    class_name = dotted[-1]
    module = importlib.import_module(module_name)
    backend_class = getattr(module, class_name)
    return backend_class(config['settings'])
22 |
23 |
def items_sorted_by_key(kwargs):
    """Return the dict's (key, value) pairs sorted alphabetically by key."""
    # dict.items() behaves identically here on Python 2 and 3; sorted()
    # materializes a list either way, so the lazy six.iteritems() view
    # bought nothing and needlessly coupled this helper to six.
    return sorted(kwargs.items(), key=operator.itemgetter(0))
26 |
27 |
def order_dict_to_string(d):
    """Render a dict as a '{...}' string, preserving its iteration order."""
    inner = (str({key: value}).strip('{}') for key, value in d.items())
    return '{' + ', '.join(inner) + '}'
30 |
31 |
def force_string(kwargs):
    """
    Recursively coerce the keys/values of a dict, or the elements of a
    list, to strings.
    Fixes: https://github.com/cameronmaske/celery-once/issues/11
    """
    if isinstance(kwargs, dict):
        # Context: https://github.com/cameronmaske/celery-once/issues/58
        # The rendered form must stay equivalent to str(dict) so that
        # previously generated lock keys remain valid.
        ordered = OrderedDict(
            (force_string(key), force_string(value))
            for key, value in items_sorted_by_key(kwargs)
        )
        return order_dict_to_string(ordered)
    if isinstance(kwargs, list):
        return [force_string(item) for item in kwargs]
    if six.PY2 and isinstance(kwargs, unicode):
        return kwargs.encode('utf-8')
    return kwargs
49 |
50 |
def kwargs_to_list(kwargs):
    """
    Turns {'a': 1, 'b': 2} into ["a-1", "b-2"].

    Entries are emitted in alphabetical order of their keys.
    """
    # items_sorted_by_key() provides the stable alphabetic ordering.
    return [
        '%s-%s' % (key, force_string(value))
        for key, value in items_sorted_by_key(kwargs)
    ]
61 |
62 |
def queue_once_key(name, kwargs, restrict_to=None):
    """
    Build the lock key from the task name, its kwargs and, optionally,
    a restricted set of kwarg names.
    """
    if restrict_to is not None:
        # Only the allowed keys contribute to the lock key.
        # NOTE: raises KeyError if a restricted key is absent from kwargs.
        kwargs = {key: kwargs[key] for key in restrict_to}
    parts = ['qo', force_string(name)]
    parts.extend(kwargs_to_list(kwargs))
    return '_'.join(parts)
77 |
--------------------------------------------------------------------------------
/celery_once/tasks.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """Definition of the QueueOnce task and AlreadyQueued exception."""
3 |
4 | from celery import Task, states
5 | from celery.result import EagerResult
6 | from .helpers import queue_once_key, import_backend
7 |
8 |
class AlreadyQueued(Exception):
    """Raised when a task holding the same lock key is already queued."""

    def __init__(self, countdown):
        # ``countdown`` is the number of seconds until the lock expires.
        self.countdown = countdown
        self.message = "Expires in {} seconds".format(countdown)
13 |
# Python 3 ships ``inspect.signature``; Python 2 needs the ``funcsigs``
# backport. Catch only ImportError — the original bare ``except:`` would
# also have swallowed unrelated errors (KeyboardInterrupt, SystemExit).
try:
    from inspect import signature
except ImportError:
    from funcsigs import signature
18 |
19 |
class QueueOnce(Task):
    # Abstract base task — celery will not register QueueOnce itself.
    abstract = True
    # Default once-options; individual tasks override them via
    # @task(..., once={...}) or per-call via apply_async(once={...}).
    once = {
        'graceful': False,
        'unlock_before_run': False
    }

    # NOTE(review): the string below is *not* the class docstring — it
    # follows the attribute assignments, so it is a discarded expression
    # and never reaches __doc__. Consider moving it above ``abstract``.
    """
    'There can be only one'. - Highlander (1986)

    An abstract tasks with the ability to detect if it has already been
    queued. When running the task (through .delay/.apply_async) it checks
    if the tasks is not already queued. By default it will raise an
    an AlreadyQueued exception if it is, by you can silence this by including
    `once={'graceful': True}` in apply_async or in the task's settings.

    Example:

    >>> from celery_queue.tasks import QueueOnce
    >>> from celery import task
    >>> @task(base=QueueOnce, once={'graceful': True})
    >>> def example(time):
    >>>     from time import sleep
    >>>     sleep(time)
    """
    @property
    def config(self):
        """The configuration of the bound celery app."""
        app = self._get_app()
        return app.conf

    @property
    def once_config(self):
        """The app-level ONCE configuration dict (backend + settings)."""
        return self.config.ONCE

    @property
    def once_backend(self):
        """A freshly imported/instantiated locking backend."""
        return import_backend(self.once_config)

    @property
    def default_timeout(self):
        """Lock timeout in seconds; falls back to one hour."""
        return self.once_config['settings'].get('default_timeout', 60 * 60)

    def unlock_before_run(self):
        """Whether the lock is released before (rather than after) run()."""
        return self.once.get('unlock_before_run', False)

    def __init__(self, *args, **kwargs):
        # Cache the run() signature once; get_key() binds call args
        # against it to build a canonical lock key.
        self._signature = signature(self.run)
        return super(QueueOnce, self).__init__(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        # Only clear the lock before the task's execution if the
        # "unlock_before_run" option is True
        if self.unlock_before_run():
            key = self.get_key(args, kwargs)
            self.once_backend.clear_lock(key)
        return super(QueueOnce, self).__call__(*args, **kwargs)

    def apply_async(self, args=None, kwargs=None, **options):
        """
        Attempts to queue a task, raising AlreadyQueued if the task's
        lock is already held.

        :param args: positional arguments passed on to the task.
        :param kwargs: keyword arguments passed on to the task.
        :keyword once: (optional) dict of once-specific options:
            ``graceful``: if True, return an EagerResult in the REJECTED
                state instead of raising AlreadyQueued.
            ``timeout``: an ``int`` number of seconds after which the
                lock expires; defaults to ``default_timeout`` (1 hour).
        """
        once_options = options.get('once', {})
        once_graceful = once_options.get(
            'graceful', self.once.get('graceful', False))
        once_timeout = once_options.get(
            'timeout', self.once.get('timeout', self.default_timeout))

        # Only take the lock on the first enqueue; a retry of an already
        # running task still holds its original lock.
        if not options.get('retries'):
            key = self.get_key(args, kwargs)
            try:
                self.once_backend.raise_or_lock(key, timeout=once_timeout)
            except AlreadyQueued as e:
                if once_graceful:
                    return EagerResult(None, None, states.REJECTED)
                raise e
        return super(QueueOnce, self).apply_async(args, kwargs, **options)

    def _get_call_args(self, args, kwargs):
        call_args = self._signature.bind(*args, **kwargs).arguments
        # Remove the task instance from the kwargs. This only happens when the
        # task has the 'bind' attribute set to True. We remove it, as the task
        # has a memory pointer in its repr, that will change between the task
        # caller and the celery worker
        if isinstance(call_args.get('self'), Task):
            del call_args['self']
        return call_args

    def get_key(self, args=None, kwargs=None):
        """
        Generate the key from the name of the task (e.g. 'tasks.example') and
        args/kwargs.
        """
        restrict_to = self.once.get('keys', None)
        args = args or {}
        kwargs = kwargs or {}
        call_args = self._get_call_args(args, kwargs)
        key = queue_once_key(self.name, call_args, restrict_to)
        return key

    def after_return(self, status, retval, task_id, args, kwargs, einfo):
        """
        After a task has run (both successfully or with a failure) clear the
        lock if "unlock_before_run" is False.
        """
        # Only clear the lock after the task's execution if the
        # "unlock_before_run" option is False
        if not self.unlock_before_run():
            key = self.get_key(args, kwargs)
            self.once_backend.clear_lock(key)
142 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | python:
4 | build: .
5 | command: 'pytest tests/ -m "framework"'
6 | volumes:
7 | - .:/code
8 | environment:
9 | - "REDIS_URL=redis://redis:6379"
10 | depends_on:
11 | - redis
12 | flask_worker:
13 | build: .
14 | command: "celery -A tests.integration.flask_app.app.celery worker"
15 | volumes:
16 | - .:/code
17 | depends_on:
18 | - redis
19 | environment:
20 | - C_FORCE_ROOT=True
21 | - "REDIS_URL=redis://redis:6379"
22 | redis:
23 | image: redis:4.0-alpine
24 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = -m "not framework"
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pytest==4.2.0
2 | pytest-cov==2.6.1
3 | pytest-mock==1.10.1
4 | python-coveralls==2.9.1
5 | coverage==4.5.2
6 | fakeredis==1.0.3
7 | mock==1.0.1
8 | Flask
9 | celery
10 | redis==3.2.1
11 | six==1.12.0
12 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import re

# The README doubles as the PyPI long description.
with open('README.rst') as f:
    readme = f.read()

requirements = [
    "celery",
    "redis>=2.10.2"
]

# Read __version__ from the package without importing it (importing
# would require celery/redis to be installed at build time).
__version__ = ''
with open('celery_once/__init__.py', 'r') as fd:
    reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
    for line in fd:
        m = reg.match(line)
        if m:
            __version__ = m.group(1)
            break

if not __version__:
    raise RuntimeError('Cannot find version information')



setup(
    name='celery_once',
    version=__version__,
    description='Allows you to prevent multiple execution and queuing of celery tasks.',
    long_description=readme,
    author='Cameron Maske',
    author_email='cameronmaske@gmail.com',
    url='https://github.com/cameronmaske/celery-once',
    packages=find_packages(),
    install_requires=requirements,
    license="BSD",
    keywords='celery, mutex, once, lock, redis',
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: System :: Distributed Computing'
    ],
)
53 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/backends.py:
--------------------------------------------------------------------------------
1 | import mock
2 |
3 |
class TestBackend(object):
    """Minimal fake locking backend used by the task tests."""

    # Class-level mocks: shared across instances on purpose, so the
    # tests can assert calls regardless of which instance was used.
    raise_or_lock = mock.Mock()
    clear_lock = mock.Mock()

    def __init__(self, settings):
        self.settings = settings
10 |
--------------------------------------------------------------------------------
/tests/integration/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cameronmaske/celery-once/4f68b6b3c8503b9360179a246521b07315cdf577/tests/integration/__init__.py
--------------------------------------------------------------------------------
/tests/integration/backends/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cameronmaske/celery-once/4f68b6b3c8503b9360179a246521b07315cdf577/tests/integration/backends/__init__.py
--------------------------------------------------------------------------------
/tests/integration/backends/test_file.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import time
3 | import os
4 |
5 | from celery import Celery
6 | from celery_once import QueueOnce, AlreadyQueued
7 |
8 |
# Eager celery app wired to the File locking backend.
app = Celery()
app.conf.ONCE = {
    'backend': "celery_once.backends.File",
    'settings': {
        'location': '/tmp/celery_once',
        'default_timeout': 60 * 60
    }
}
# Run tasks synchronously so the tests can inspect the lock files.
app.conf.CELERY_ALWAYS_EAGER = True
18 |
19 |
# Task under test; its lock key derives from the bound kwarg a=1.
@app.task(name="example", base=QueueOnce)
def example(a=1):
    pass
23 |
@pytest.fixture()
def lock_path():
    # Expected lock file for key 'qo_example_a-1' (suffix is the File
    # backend's hash of the key).
    path = '/tmp/celery_once/qo_example_a-1_b7f89d8561e5788a3e7687c6ede93bcd'
    yield path
    # Remove the lock file after the test runs. Tolerate its absence so
    # a failing test isn't masked by a FileNotFoundError in teardown.
    if os.path.exists(path):
        os.remove(path)
29 |
def test_delay(lock_path):
    example.delay(1)
    # The old assertion used os.open(lock_path, os.O_CREAT), which
    # *creates* the file and always returns a valid fd (never None), so
    # it could never fail — and it leaked the fd. Check existence instead.
    assert os.path.exists(lock_path)
33 |
def test_delay_already_queued(lock_path):
    # Create the lock file up front, closing the fd we opened (the old
    # code leaked it).
    os.close(os.open(lock_path, os.O_CREAT))
    with pytest.raises(AlreadyQueued):
        example.delay(1)
38 |
39 |
--------------------------------------------------------------------------------
/tests/integration/backends/test_redis.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import time
3 | from fakeredis import FakeStrictRedis
4 |
5 | from celery import Celery
6 | from celery_once import QueueOnce, AlreadyQueued
7 | from redis.lock import Lock as RedisLock
8 |
9 |
# Replace the Redis.redis property with a fresh FakeStrictRedis so no
# real server is needed; flushall isolates each test.
@pytest.fixture()
def redis(monkeypatch):
    fake_redis = FakeStrictRedis()
    fake_redis.flushall()
    monkeypatch.setattr("celery_once.backends.redis.Redis.redis", fake_redis)
    return fake_redis
16 |
17 |
# Eager celery app wired to the Redis locking backend.
app = Celery()
app.conf.ONCE = {
    'backend': "celery_once.backends.redis.Redis",
    'settings': {
        'url': "redis://localhost:1337/0",
        # NOTE(review): QueueOnce reads 'default_timeout' from settings;
        # this 'timeout' entry looks unused — confirm intent.
        'timeout': 30 * 60
    }
}
app.conf.CELERY_ALWAYS_EAGER = True
27 |
28 |
# While the task body runs, its lock key must still be held.
@app.task(name="example", base=QueueOnce)
def example(a=1):
    assert example.once_backend.redis.get("qo_example_a-1") is not None
32 |
33 |
def test_init():
    # The backend's connection kwargs should mirror the configured URL.
    details = example.once_backend.redis.connection_pool.connection_kwargs
    assert details['host'] == "localhost"
    assert details['port'] == 1337
    assert details['db'] == 0
39 |
40 |
def test_delay(redis):
    example.delay(1)
    # Eager mode: the lock is cleared again once the task has finished.
    assert redis.get("qo_example_a-1") is None
44 |
45 |
def test_delay_already_queued(redis):
    # Pre-set the lock key so the task appears to be queued already.
    redis.set("qo_example_a-1", 10000000000)
    # pytest.raises is the idiomatic form (and the one already used
    # elsewhere in this suite) for asserting an exception.
    with pytest.raises(AlreadyQueued):
        example.delay(1)
53 |
54 |
def test_delay_expired(redis):
    # Hold the lock with a 1 second expiry…
    lock = RedisLock(redis, "qo_example_a-1", timeout=1)
    lock.acquire()

    assert redis.get("qo_example_a-1") is not None

    # …then wait for it to lapse: delay() must succeed again.
    time.sleep(1)
    example.delay(1)

    assert redis.get("qo_example_a-1") is None
65 |
66 |
def test_apply_async(redis):
    example.apply_async(args=(1, ))
    # Eager mode: the lock is cleared again once the task has finished.
    assert redis.get("qo_example_a-1") is None
70 |
71 |
def test_apply_async_queued(redis):
    # Pre-set the lock key so the task appears to be queued already.
    redis.set("qo_example_a-1", 10000000000)
    # pytest.raises replaces the try/pytest.fail/except pattern for
    # consistency with the rest of the suite.
    with pytest.raises(AlreadyQueued):
        example.apply_async(args=(1, ))
79 |
80 |
def test_already_queued_graceful(redis):
    redis.set("qo_example_a-1", 10000000000)
    # graceful=True: no exception, a rejected EagerResult comes back.
    result = example.apply_async(args=(1, ), once={'graceful': True})
    assert result.result is None
85 |
86 |
def test_apply_async_expired(redis):
    # Hold the lock with a 1 second expiry…
    lock = RedisLock(redis, "qo_example_a-1", timeout=1)
    lock.acquire()

    assert redis.get("qo_example_a-1") is not None

    # …then wait for it to lapse: apply_async() must succeed again.
    time.sleep(1)
    example.apply_async(args=(1, ))

    assert redis.get("qo_example_a-1") is None
97 |
--------------------------------------------------------------------------------
/tests/integration/flask_app/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cameronmaske/celery-once/4f68b6b3c8503b9360179a246521b07315cdf577/tests/integration/flask_app/__init__.py
--------------------------------------------------------------------------------
/tests/integration/flask_app/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, has_app_context
2 | from celery import Celery
3 | from time import sleep
4 | from celery_once import QueueOnce
5 | import os
6 |
7 | REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379")
8 |
def make_celery(app):
    """Create a Celery app bound to the given Flask app.

    Tasks created from the returned app run inside the Flask
    application context (both plain tasks and QueueOnce tasks).
    """
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)

    # Wrap execution in app.app_context() so tasks can use Flask globals.
    class ContextTask(celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    # Same wrapping for QueueOnce-based tasks (lock handling included).
    class ContextQueueOnce(QueueOnce):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return super(ContextQueueOnce, self).__call__(*args, **kwargs)

    celery.Task = ContextTask
    celery.QueueOnce = ContextQueueOnce
    return celery
30 |
flask_app = Flask(__name__)
# Redis dbs 1/2/3 keep the broker, result backend and once-locks separate.
flask_app.config.update(
    CELERY_BROKER_URL='{}/1'.format(REDIS_URL),
    CELERY_RESULT_BACKEND='{}/2'.format(REDIS_URL),
)
celery = make_celery(flask_app)
celery.conf.ONCE = {
    'backend': 'celery_once.backends.Redis',
    'settings': {
        'url': '{}/3'.format(REDIS_URL),
        'default_timeout': 60 * 60
    }
}
44 |
# Setting the `name` allow us to reach this task in the test folder.
@celery.task(name="tests.integration.flask_app.app.sleep_task", base=celery.QueueOnce)
def sleep_task(value):
    # The ContextQueueOnce wrapper must have pushed an app context.
    assert has_app_context() is True
    return sleep(value)
--------------------------------------------------------------------------------
/tests/integration/flask_app/test_flask.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | from tests.integration.flask_app.app import sleep_task
3 | from time import sleep
4 | import pytest
5 | from celery_once import AlreadyQueued
6 | import pytest
7 |
8 |
# Requires the dockerised worker + redis (see docker-compose.yml).
@pytest.mark.framework
def test_flask():
    sleep_task.delay(1)
    sleep(0.5)
    # While the first run is still sleeping, a duplicate must be refused.
    with pytest.raises(AlreadyQueued):
        sleep_task.delay(1)
    sleep(2)  # Task should have completed by now.
    sleep_task.delay(1)
17 |
--------------------------------------------------------------------------------
/tests/integration/test_tasks.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from celery import Celery
4 | from celery_once import QueueOnce, AlreadyQueued
5 |
6 |
# Eager celery app wired to the in-memory TestBackend.
app = Celery()
app.conf.ONCE = {
    'backend': 'tests.backends.TestBackend',
    'settings': {
        'default_timeout': 60
    }
}
app.conf.CELERY_ALWAYS_EAGER = True
15 |
16 |
17 | @pytest.fixture(autouse=True)
18 | def mock_backend(mocker):
19 | mocker.patch('tests.backends.TestBackend.raise_or_lock')
20 | mocker.patch('tests.backends.TestBackend.clear_lock')
21 |
22 |
# Plain QueueOnce task with default once-options.
@app.task(name="example", base=QueueOnce)
def example():
    return
26 |
27 |
# Variant that releases the lock before run() instead of after.
@app.task(name="example_unlock_before_run", base=QueueOnce, once={'unlock_before_run': True})
def example_unlock_before_run():
    return
31 |
32 |
# keys=[] -> the lock key ignores all kwargs, so the retry maps to the
# same key as the original call.
@app.task(name="example_retry", base=QueueOnce, once={'keys': []}, bind=True)
def example_retry(self, a=0):
    if a != 1:
        # Forcing called_directly=False lets retry() go through the
        # eager machinery instead of raising immediately.
        self.request.called_directly = False
        self.retry(kwargs={'a': 1})
38 |
39 |
# The task's config property proxies the app configuration.
def test_config():
    assert example.config == app.conf
42 |
43 |
# once_config exposes the ONCE dict set on the app above.
def test_once_config():
    assert example.once_config == {
        'backend': 'tests.backends.TestBackend',
        'settings': {
            'default_timeout': 60
        }
    }
51 |
52 |
# default_timeout comes from settings['default_timeout'].
def test_default_timeout():
    assert example.default_timeout == 60
55 |
56 |
# apply_async must take the lock with the default timeout.
def test_apply_async():
    example.apply_async()
    example.once_backend.raise_or_lock.assert_called_with(
        "qo_example", timeout=60)
61 |
62 |
# A per-call once={'timeout': ...} overrides the default timeout.
def test_apply_async_timeout(mocker):
    example.once_backend.raise_or_lock = mocker.Mock()
    example.apply_async(once={'timeout': 120})
    example.once_backend.raise_or_lock.assert_called_with(
        "qo_example", timeout=120)
68 |
69 |
# Without graceful, an AlreadyQueued from the backend propagates.
def test_raise_already_queued():
    example.once_backend.raise_or_lock.side_effect = AlreadyQueued(60)
    with pytest.raises(AlreadyQueued):
        example.apply_async()
74 |
75 |
# With graceful=True the AlreadyQueued is swallowed and a rejected
# EagerResult (result None) is returned instead.
def test_raise_already_queued_graceful():
    example.once_backend.raise_or_lock.side_effect = AlreadyQueued(60)
    result = example.apply_async(once={'graceful': True})
    assert result.result is None
80 |
81 |
def test_retry():
    example_retry.apply_async()
    # Assert through the task under test. (The mocks are class-level on
    # TestBackend, so the old references via ``example`` reached the
    # same objects, but only by accident of that sharing.)
    example_retry.once_backend.raise_or_lock.assert_called_with(
        "qo_example_retry", timeout=60)
    example_retry.once_backend.clear_lock.assert_called_with("qo_example_retry")
87 |
88 |
def test_delay_unlock_before_run(mocker):
    # Attach both mocks to one parent so their call *order* is recorded.
    mock_parent = mocker.Mock()
    clear_lock_mock = mocker.Mock()
    after_return_mock = mocker.Mock()
    mock_parent.attach_mock(clear_lock_mock, 'clear_lock')
    mock_parent.attach_mock(after_return_mock, 'after_return')
    example_unlock_before_run.once_backend.clear_lock.side_effect = clear_lock_mock
    example_unlock_before_run.after_return = after_return_mock
    example_unlock_before_run.apply_async()
    # clear_lock must run first (before the task), then after_return.
    assert len(mock_parent.mock_calls) == 2
    assert mock_parent.mock_calls[0] == mocker.call.clear_lock('qo_example_unlock_before_run')
100 |
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cameronmaske/celery-once/4f68b6b3c8503b9360179a246521b07315cdf577/tests/unit/__init__.py
--------------------------------------------------------------------------------
/tests/unit/backends/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cameronmaske/celery-once/4f68b6b3c8503b9360179a246521b07315cdf577/tests/unit/backends/__init__.py
--------------------------------------------------------------------------------
/tests/unit/backends/test_file.py:
--------------------------------------------------------------------------------
1 | import errno
2 | import os
3 | import tempfile
4 | import time
5 |
6 | import pytest
7 |
8 | from celery_once.backends.file import key_to_lock_name, File
9 | from celery_once.tasks import AlreadyQueued
10 |
11 |
# Lock file names are a truncated key plus an md5 digest of the key.
def test_key_to_lock_name():
    assert key_to_lock_name('qo_test') == \
        'qo_test_999f583e69db6a0c04b86beeebb2b631'
    assert key_to_lock_name('qo_looooooong_task_name') == \
        'qo_looooooong_tas_6626e5965e549303044d5a7f4fdc3c6b'
17 |
18 |
# File() must create its configured location directory on init.
def test_file_init(mocker):
    makedirs_mock = mocker.patch('celery_once.backends.file.os.makedirs')
    location = '/home/test'
    backend = File({'location': location})

    assert backend.location == location
    assert makedirs_mock.called is True
    assert makedirs_mock.call_args[0] == (location,)
27 |
28 |
# With no 'location' setting, File() defaults to <tmpdir>/celery_once.
def test_file_init_default(mocker):
    makedirs_mock = mocker.patch('celery_once.backends.file.os.makedirs')
    backend = File({})

    assert backend.location == os.path.join(tempfile.gettempdir(),
                                            'celery_once')
    assert makedirs_mock.called is True
36 |
37 |
# An EEXIST from makedirs (directory already present) must be tolerated.
def test_file_init_location_exists(mocker):
    makedirs_mock = mocker.patch('celery_once.backends.file.os.makedirs',
                                 side_effect=OSError(errno.EEXIST, 'error'))
    location = '/home/test'
    backend = File({'location': location})

    assert backend.location == location
    assert makedirs_mock.called is True
46 |
47 |
48 | TEST_LOCATION = '/tmp/celery'
49 |
50 |
# File backend with makedirs stubbed out (no real directory is created).
@pytest.fixture()
def backend(mocker):
    mocker.patch('celery_once.backends.file.os.makedirs')
    backend = File({'location': TEST_LOCATION})
    return backend
56 |
57 |
def test_file_create_lock(backend, mocker):
    """A fresh key creates the lock file exclusively and closes the fd."""
    key = 'test.task.key'
    timeout = 3600
    open_mock = mocker.patch('celery_once.backends.file.os.open')
    # Patched defensively; getmtime must not be reached on this path, so
    # the previously bound (and unused) name is dropped.
    mocker.patch('celery_once.backends.file.os.path.getmtime')
    utime_mock = mocker.patch('celery_once.backends.file.os.utime')
    close_mock = mocker.patch('celery_once.backends.file.os.close')
    expected_lock_path = os.path.join(TEST_LOCATION,
                                      key_to_lock_name(key))
    ret = backend.raise_or_lock(key, timeout)

    assert open_mock.call_count == 1
    assert open_mock.call_args[0] == (
        expected_lock_path,
        os.O_CREAT | os.O_EXCL,
    )
    assert utime_mock.called is False
    assert close_mock.called is True
    assert ret is None
77 |
def test_file_lock_exists(backend, mocker):
    """An unexpired existing lock raises AlreadyQueued with the TTL left."""
    key = 'test.task.key'
    timeout = 3600
    open_mock = mocker.patch(
        'celery_once.backends.file.os.open',
        side_effect=OSError(errno.EEXIST, 'error'))
    # Lock is 1000s old (1550156000 - 1550155000), well inside timeout.
    # The patch objects for getmtime/time were never asserted on, so the
    # unused name bindings are dropped.
    mocker.patch(
        'celery_once.backends.file.os.path.getmtime',
        return_value=1550155000.0)
    mocker.patch(
        'celery_once.backends.file.time.time',
        return_value=1550156000.0)
    utime_mock = mocker.patch('celery_once.backends.file.os.utime')
    close_mock = mocker.patch('celery_once.backends.file.os.close')
    with pytest.raises(AlreadyQueued) as exc_info:
        backend.raise_or_lock(key, timeout)

    assert open_mock.call_count == 1
    assert utime_mock.called is False
    assert close_mock.called is False
    assert exc_info.value.countdown == timeout - 1000
99 |
def test_file_lock_timeout(backend, mocker):
    """An expired existing lock is re-claimed by touching its mtime."""
    key = 'test.task.key'
    timeout = 3600
    open_mock = mocker.patch(
        'celery_once.backends.file.os.open',
        side_effect=OSError(errno.EEXIST, 'error'))
    # Lock is 6000s old (1550156000 - 1550150000), past the 3600s timeout.
    # Unused name bindings for the getmtime/time patches are dropped.
    mocker.patch(
        'celery_once.backends.file.os.path.getmtime',
        return_value=1550150000.0)
    mocker.patch(
        'celery_once.backends.file.time.time',
        return_value=1550156000.0)
    utime_mock = mocker.patch('celery_once.backends.file.os.utime')
    close_mock = mocker.patch('celery_once.backends.file.os.close')
    expected_lock_path = os.path.join(TEST_LOCATION,
                                      key_to_lock_name(key))
    ret = backend.raise_or_lock(key, timeout)

    assert open_mock.call_count == 1
    assert utime_mock.call_count == 1
    assert utime_mock.call_args[0] == (expected_lock_path, None)
    assert close_mock.called is False
    assert ret is None
123 |
# clear_lock removes the lock file derived from the key.
def test_file_clear_lock(backend, mocker):
    key = 'test.task.key'
    remove_mock = mocker.patch('celery_once.backends.file.os.remove')
    expected_lock_path = os.path.join(TEST_LOCATION,
                                      key_to_lock_name(key))
    ret = backend.clear_lock(key)

    assert remove_mock.call_count == 1
    assert remove_mock.call_args[0] == (expected_lock_path,)
    assert ret is None
134 |
--------------------------------------------------------------------------------
/tests/unit/backends/test_redis.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pytest import approx
3 | import time
4 | from fakeredis import FakeStrictRedis
5 |
6 | from celery_once.backends.redis import parse_url, Redis
7 | from celery_once.tasks import AlreadyQueued
8 | from redis.lock import Lock as RedisLock
9 |
10 |
# A bare trailing '/' means no db segment: host/port only.
def test_parse_redis_details_tcp_default_args():
    details = parse_url('redis://localhost:6379/')
    assert details == {'host': 'localhost', 'port': 6379}
14 |
15 |
# A numeric path segment is parsed into an int 'db'.
def test_parse_url_tcp_with_db():
    details = parse_url('redis://localhost:6379/3')
    assert details == {'host': 'localhost', 'port': 6379, 'db': 3}
19 |
20 |
# Without a port, only the host is returned (redis-py applies defaults).
def test_parse_url_tcp_no_port():
    details = parse_url('redis://localhost')
    assert details == {'host': 'localhost'}
24 |
25 |
# The password portion of the netloc is extracted.
def test_parse_url_tcp_with_password():
    details = parse_url('redis://:ohai@localhost:6379')
    assert details == {'host': 'localhost', 'port': 6379, 'password': 'ohai'}
29 |
30 |
# redis+socket:// URLs map the path to unix_socket_path.
def test_parse_url_unix_sock_no_options():
    details = parse_url('redis+socket:///var/run/redis/redis.sock')
    assert details == {'unix_socket_path': '/var/run/redis/redis.sock'}
34 |
35 |
# Query options are coerced to their proper types (int db, float timeout).
def test_parse_url_unix_sock_with_options():
    details = parse_url('redis+socket:///var/run/redis/redis.sock?db=2&socket_timeout=2')
    assert details == {
        'unix_socket_path': '/var/run/redis/redis.sock',
        'db': 2,
        'socket_timeout': 2.0
    }
43 |
44 |
# The rediss scheme additionally sets ssl=True.
def test_parse_url_with_ssl():
    details = parse_url('rediss://localhost:6379/3')
    assert details == {'host': 'localhost', 'port': 6379, 'db': 3, 'ssl': True}
48 |
49 |
# Non-redis schemes are rejected with ValueError.
def test_parse_unsupported_url():
    with pytest.raises(ValueError):
        parse_url('amqp://guest:guest@localhost:5672/potato')
53 |
54 |
# Replace the Redis.redis property with a fresh FakeStrictRedis so no
# real server is needed; flushall isolates each test.
@pytest.fixture()
def redis(monkeypatch):
    fake_redis = FakeStrictRedis()
    fake_redis.flushall()
    monkeypatch.setattr("celery_once.backends.redis.Redis.redis", fake_redis)
    return fake_redis
61 |
62 |
# Redis backend instance; its client is patched out by the redis fixture.
@pytest.fixture()
def backend():
    backend = Redis({'url': "redis://localhost:1337"})
    return backend
67 |
68 |
# Acquiring a free key sets the lock value in redis.
def test_redis_raise_or_lock(redis, backend):
    assert redis.get("test") is None
    backend.raise_or_lock(key="test", timeout=60)
    assert redis.get("test") is not None
73 |
# A held lock raises AlreadyQueued carrying the remaining TTL (seconds).
def test_redis_raise_or_lock_locked(redis, backend):
    # Set to expire in 30 seconds!
    lock = RedisLock(redis, "test", timeout=30)
    lock.acquire()

    with pytest.raises(AlreadyQueued) as e:
        backend.raise_or_lock(key="test", timeout=60)

    assert e.value.countdown == approx(30.0, rel=0.1)
    assert "Expires in" in e.value.message
84 |
85 |
# Once the previous lock expires, acquisition succeeds again.
def test_redis_raise_or_lock_locked_and_expired(redis, backend):
    lock = RedisLock(redis, "test", timeout=1)
    lock.acquire()
    time.sleep(1)  # wait for lock to expire

    backend.raise_or_lock(key="test", timeout=60)
    assert redis.get("test") is not None
93 |
94 |
# clear_lock deletes the key outright.
def test_redis_clear_lock(redis, backend):
    redis.set("test", 1326499200 + 30)
    backend.clear_lock("test")
    assert redis.get("test") is None
99 |
100 |
# The module-level client is created once and reused by later instances.
def test_redis_cached_property(mocker, monkeypatch):
    # Remove any side effect previous tests could have had
    monkeypatch.setattr('celery_once.backends.redis.redis', None)
    mock_parse = mocker.patch('celery_once.backends.redis.parse_url')
    mock_parse.return_value = {
        'host': "localhost"
    }
    # Despite the class being inited twice, should only setup once.
    Redis({
        'url': "redis://localhost:1337"
    })
    Redis({})
    assert mock_parse.call_count == 1
114 |
--------------------------------------------------------------------------------
/tests/unit/test_helpers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from celery_once.helpers import (
3 | queue_once_key, kwargs_to_list, force_string, import_backend)
4 |
5 | import pytest
6 | import six
7 |
8 |
def test_force_string_1():
    """A plain native string passes through unchanged."""
    result = force_string('a')
    assert result == 'a'
11 |
12 |
def test_force_string_2():
    """A unicode literal is coerced to a native string."""
    result = force_string(u'a')
    assert result == 'a'
15 |
16 |
def test_force_string_3():
    """A non-ASCII native string passes through unchanged."""
    result = force_string('é')
    assert result == 'é'
19 |
20 |
def test_force_string_4():
    """A non-ASCII unicode literal is coerced to a native string."""
    result = force_string(u'é')
    assert result == 'é'
23 |
24 |
def test_kwargs_to_list_empty():
    """An empty kwargs dict yields no key fragments."""
    assert kwargs_to_list({}) == []
28 |
29 |
def test_kwargs_to_list_1():
    """A single int kwarg renders as 'name-value'."""
    assert kwargs_to_list({'int': 1}) == ["int-1"]
33 |
34 |
def test_kwargs_to_list_2():
    """Fragments come back sorted alphabetically by kwarg name."""
    result = kwargs_to_list({'int': 1, 'boolean': True})
    assert result == ["boolean-True", "int-1"]
38 |
39 |
def test_kwargs_to_list_3():
    """Str, int, and bool values all render in sorted name order."""
    result = kwargs_to_list({'int': 1, 'boolean': True, 'str': "abc"})
    assert result == ["boolean-True", "int-1", "str-abc"]
43 |
44 |
def test_kwargs_to_list_4():
    """List values are rendered with repr-style formatting."""
    result = kwargs_to_list(
        {'int': 1, 'boolean': True, 'str': 'abc', 'list': [1, '2']})
    assert result == ["boolean-True", "int-1", "list-[1, '2']", "str-abc"]
49 |
50 |
@pytest.mark.skipif(six.PY3, reason='requires python 2')
def test_kwargs_to_list_5():
    """On Python 2, unicode values are encoded to UTF-8 byte strings."""
    result = kwargs_to_list(
        {'a': {u'é': 'c'}, 'b': [u'a', 'é'], u'c': 1, 'd': 'é', 'e': u'é'})
    expected = [
        "a-{'\\xc3\\xa9': 'c'}",
        "b-['a', '\\xc3\\xa9']",
        "c-1",
        "d-\xc3\xa9",
        "e-\xc3\xa9",
    ]
    assert result == expected
62 |
63 |
@pytest.mark.skipif(six.PY2, reason='requires python 3')
def test_kwargs_to_list_6():
    """On Python 3, unicode values stay as native strings."""
    result = kwargs_to_list(
        {'a': {u'é': 'c'}, 'b': [u'a', 'é'], u'c': 1, 'd': 'é', 'e': u'é'})
    assert result == ["a-{'é': 'c'}", "b-['a', 'é']", "c-1", "d-é", 'e-é']
69 |
70 |
def test_kwargs_to_list_nested_dict_is_sorted_1():
    """Keys inside a nested dict value are rendered in sorted order."""
    result = kwargs_to_list({'nested': {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}})
    assert result == ["nested-{'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}"]
74 |
75 |
def test_kwargs_to_list_nested_dict_is_sorted_2():
    """Sorting applies recursively; inner dicts serialize as sorted strings."""
    result = kwargs_to_list({'nested': {'a': {'a': 1, 'b': 2}, 'c': 2, 'b': 3, 'd': 4, 'e': 5}})
    assert result == ["nested-{'a': \"{'a': 1, 'b': 2}\", 'b': 3, 'c': 2, 'd': 4, 'e': 5}"]
79 |
80 |
def test_queue_once_key():
    """With no kwargs the key is just the prefixed task name."""
    assert queue_once_key("example", {}) == "qo_example"
84 |
85 |
def test_queue_once_key_kwargs():
    """Kwargs are appended to the key as name-value fragments."""
    assert queue_once_key("example", {'pk': 10}) == "qo_example_pk-10"
89 |
90 |
def test_queue_once_key_kwargs_restrict_keys():
    """restrict_to limits which kwargs contribute to the key ('id' dropped)."""
    result = queue_once_key("example", {'pk': 10, 'id': 10}, restrict_to=['pk'])
    assert result == "qo_example_pk-10"
94 |
95 |
@pytest.mark.skipif(six.PY3, reason='requires python 2')
def test_queue_once_key_unicode_py2():
    """On Python 2, unicode names/values are UTF-8 encoded in the key."""
    result = queue_once_key(u"éxample", {'a': u'é', u'b': 'é'})
    assert result == "qo_\xc3\xa9xample_a-\xc3\xa9_b-\xc3\xa9"
100 |
101 |
@pytest.mark.skipif(six.PY2, reason='requires python 3')
def test_queue_once_key_unicode_py3():
    """On Python 3, unicode names/values appear verbatim in the key."""
    result = queue_once_key(u"éxample", {'a': u'é', u'b': 'é'})
    assert result == "qo_éxample_a-é_b-é"
106 |
107 |
class TestBackend(object):
    """Stand-in backend that just records the settings it was given.

    ``__test__ = False`` stops pytest from trying to collect this class as
    a test case: its name matches the ``Test*`` collection pattern but it
    takes an ``__init__`` argument, which would otherwise trigger a
    PytestCollectionWarning.
    """

    __test__ = False

    def __init__(self, settings):
        # Keep the raw settings so assertions can inspect them.
        self.settings = settings
111 |
112 |
def test_import_backend():
    """import_backend loads the dotted-path class and hands it the settings."""
    backend = import_backend({
        'backend': "tests.backends.TestBackend",
        'settings': 1,
    })
    assert backend.settings == 1
120 |
121 |
--------------------------------------------------------------------------------
/tests/unit/test_tasks.py:
--------------------------------------------------------------------------------
1 |
2 | from celery import task
3 | from celery_once.tasks import QueueOnce
4 |
5 |
@task(name='simple_example', base=QueueOnce)
def simple_example():
    """QueueOnce task taking no arguments."""
    return "simple"
9 |
10 |
@task(name='bound_task', bind=True, base=QueueOnce)
def bound_task(self, a, b):
    """Bound QueueOnce task; ``self`` is the task instance."""
    total = a + b
    return total
14 |
15 |
@task(name='args_example', base=QueueOnce)
def args_example(a, b):
    """QueueOnce task with two positional arguments."""
    total = a + b
    return total
19 |
20 |
@task(name='select_args_example', base=QueueOnce, once={'keys': ['a']})
def select_args_example(a, b):
    """QueueOnce task whose lock key only considers ``a``."""
    total = a + b
    return total
24 |
25 |
@task(name='autoretry_for_example', base=QueueOnce, autoretry_for=(Exception,))
def autoretry_for_example(a, b):
    """QueueOnce task combined with celery's autoretry_for wrapping."""
    total = a + b
    return total
29 |
30 |
def test_get_key_simple():
    """A task with no kwargs keys on the task name alone."""
    assert simple_example.get_key() == "qo_simple_example"
33 |
34 |
def test_get_key_args_1():
    """kwargs are folded into the lock key as name-value fragments."""
    key = args_example.get_key(kwargs={'a': 1, 'b': 2})
    assert key == "qo_args_example_a-1_b-2"
38 |
39 |
def test_get_key_args_2():
    """Positional args are mapped onto parameter names for the key."""
    key = args_example.get_key(args=(1, 2))
    assert key == "qo_args_example_a-1_b-2"
42 |
43 |
def test_get_key_select_args_1():
    """Only kwargs listed in once={'keys': ...} contribute to the key."""
    key = select_args_example.get_key(kwargs={'a': 1, 'b': 2})
    assert key == "qo_select_args_example_a-1"
47 |
48 |
def test_get_key_bound_task():
    """Bound tasks exclude ``self`` when building the key."""
    key = bound_task.get_key(kwargs={'a': 1, 'b': 2})
    assert key == "qo_bound_task_a-1_b-2"
52 |
53 |
def test_get_key_autoretry_for():
    """Tasks wrapped by autoretry_for still expose get_key correctly."""
    key = autoretry_for_example.get_key(kwargs={'a': 1, 'b': 2})
    assert key == "qo_autoretry_for_example_a-1_b-2"
57 |
def test_get_call_args():
    """_get_call_args maps positional args onto the signature's names."""
    call_args = args_example._get_call_args((1, 2), {})
    assert call_args == {'a': 1, 'b': 2}
61 |
def test_get_call_args_bound():
    """Bound tasks skip ``self`` when mapping positional args."""
    call_args = bound_task._get_call_args((1, 2), {})
    assert call_args == {'a': 1, 'b': 2}
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py27,py35,py36
3 | [testenv]
4 | deps=
5 | pytest==4.2.0
6 | pytest-cov==2.6.1
7 | pytest-mock==1.10.1
8 | python-coveralls==2.9.1
9 | coverage==4.5.2
10 | fakeredis==1.0.3
11 | mock==1.0.1
12 | redis==3.2.1
13 |
--------------------------------------------------------------------------------