├── .coveragerc
├── .gitignore
├── .travis.yml
├── CHANGELOG.rst
├── LICENSE
├── MANIFEST.in
├── README.rst
├── docs
├── .gitignore
├── Makefile
├── api.rst
├── conf.py
├── faq.rst
├── http.rst
├── index.rst
├── make.bat
└── tutorial.rst
├── dsq
├── __init__.py
├── cli.py
├── compat.py
├── http.py
├── manager.py
├── sched.py
├── store.py
├── utils.py
├── worker.py
└── wsgi.py
├── requirements.txt
├── run.py
├── setup.py
└── tests
├── __init__.py
├── conftest.py
├── test_http.py
├── test_manager.py
├── test_queue_store.py
├── test_result_store.py
├── test_sched.py
└── test_utils.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source=dsq
3 | omit=dsq/cli.py
4 | dsq/wsgi.py
5 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .cache
2 | *.pyc
3 | /tasks.py
4 | *,cover
5 | .coverage
6 | /htmlcov
7 | /*.http
8 | /*.egg-info
9 | /dist
10 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - "3.6"
4 | - "3.7"
5 | - "3.8"
6 | - "3.9"
7 | - "3.11"
8 | - "pypy"
9 | install: "pip install -r requirements.txt"
10 | script: py.test --cov dsq --cov-report term-missing --cov-fail-under=100
11 | services:
12 | - redis-server
13 | cache:
14 | - pip
15 |
--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | 0.9
2 | ===
3 |
4 | * [Fix] Support redis-py>=3.0 (fixed zadd command arguments)
5 |
6 | 0.8
7 | ===
8 |
9 | * [Fix] TERM and INT signals don't work under python3
10 |
11 | 0.7
12 | ===
13 |
14 | * [Breaking] Explicit task enqueue. I did a mistake in API design. All queue
15 | ops must be visible and transparent for source code reader. So old code:
16 |
17 | .. code:: python
18 |
19 | @task
20 | def boo(arg):
21 | ...
22 |
23 | boo('foo')
24 | boo.run_with(keep_result=300)('foo')
25 | boo.sync('foo') # sync (original func) call
26 |
27 | should be converted into:
28 |
29 | .. code:: python
30 |
31 | @task
32 | def boo(arg):
33 | ...
34 |
35 | boo.push('foo')
36 | boo.modify(keep_result=300).push('foo')
37 | boo('foo') # sync (original func) call
38 |
39 | * [Feature] Stateful tasks. One can define a shared state for task functions. It
40 | can be used for async io, for example.
41 |
42 | * [Feature] `@manager.periodic` and `@manager.crontab` to schedule task
43 | execution.
44 |
45 | * [Fix] Log done status for tasks.
46 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 Anton Bobrov
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-exclude tests *
2 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | DSQ
2 | ===
3 |
4 | .. image:: https://travis-ci.org/baverman/dsq.svg?branch=master
5 | :target: https://travis-ci.org/baverman/dsq
6 |
7 | .. image:: https://readthedocs.org/projects/dsq/badge/?version=latest
8 | :target: http://dsq.readthedocs.io/en/latest/?badge=latest
9 |
10 | Dead simple task queue using redis.
11 |
12 | .. code:: python
13 |
14 | # tasks.py
15 | import dsq
16 | manager = dsq.create_manager()
17 |
18 | @manager.task(queue='normal')
19 | def add(a, b):
20 | print a + b
21 |
22 | if __name__ == '__main__':
23 | add.push(1, 2)
24 |
25 | .. code:: bash
26 |
27 | $ python tasks.py
28 | $ dsq worker -bt tasks normal
29 |
30 | See full `DSQ documentation `_.
31 |
32 |
33 | Features
34 | --------
35 |
36 | * Low latency.
37 | * Expiring tasks (TTL).
38 | * Delayed tasks (ETA).
39 | * Retries (forever or particular amount).
40 | * Periodic tasks.
41 | * Crontab.
42 | * Dead letters.
43 | * Queue priorities.
44 | * Worker lifetime.
45 | * Task execution timeout.
46 | * Task forwarder from one redis instance to another.
47 | * HTTP interface.
48 | * Inspect tools.
49 | * Supports 2.7, >3.4 and PyPy.
50 | * 100% test coverage.
51 |
52 |
53 | The goal is a simple design. There is no worker manager, one can use
54 | supervisord/circus/whatever to spawn N workers.
55 | Simple storage model. Queue is a list and scheduled tasks are a sorted set.
56 | There are no task keys. Tasks are items of list and sorted set. There is no
57 | any registry to manage workers, basic requirements
58 | (die after some lifetime and do not hang) can be handled by workers themselves.
59 | Worker do not store result by default.
60 |
61 |
62 | Queue overhead benchmarks
63 | -------------------------
64 |
65 | DSQ has a little overhead in compare with RQ and Celery
66 | (https://gist.github.com/baverman/5303506cd89200cf246af7bafd569b2e)
67 |
68 | Pushing and processing 10k trivial add tasks::
69 |
70 | === DSQ ===
71 | Push
72 | real 0m0.906s
73 | user 0m0.790s
74 | sys 0m0.107s
75 |
76 | Process
77 | real 0m1.949s
78 | user 0m0.763s
79 | sys 0m0.103s
80 |
81 |
82 | === RQ ===
83 | Push
84 | real 0m3.617s
85 | user 0m3.177s
86 | sys 0m0.293s
87 |
88 | Process
89 | real 0m57.706s
90 | user 0m29.807s
91 | sys 0m20.070s
92 |
93 |
94 | === Celery ===
95 | Push
96 | real 0m5.753s
97 | user 0m5.237s
98 | sys 0m0.327s
99 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | /_build
2 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
16 |
17 | .PHONY: help
18 | help:
19 | @echo "Please use \`make ' where is one of"
20 | @echo " html to make standalone HTML files"
21 | @echo " dirhtml to make HTML files named index.html in directories"
22 | @echo " singlehtml to make a single large HTML file"
23 | @echo " pickle to make pickle files"
24 | @echo " json to make JSON files"
25 | @echo " htmlhelp to make HTML files and a HTML help project"
26 | @echo " qthelp to make HTML files and a qthelp project"
27 | @echo " applehelp to make an Apple Help Book"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " epub3 to make an epub3"
31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
32 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
34 | @echo " text to make text files"
35 | @echo " man to make manual pages"
36 | @echo " texinfo to make Texinfo files"
37 | @echo " info to make Texinfo files and run them through makeinfo"
38 | @echo " gettext to make PO message catalogs"
39 | @echo " changes to make an overview of all changed/added/deprecated items"
40 | @echo " xml to make Docutils-native XML files"
41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
42 | @echo " linkcheck to check all external links for integrity"
43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
44 | @echo " coverage to run coverage check of the documentation (if enabled)"
45 | @echo " dummy to check syntax errors of document sources"
46 |
47 | .PHONY: clean
48 | clean:
49 | rm -rf $(BUILDDIR)/*
50 |
51 | .PHONY: html
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | .PHONY: dirhtml
58 | dirhtml:
59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
60 | @echo
61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
62 |
63 | .PHONY: singlehtml
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | .PHONY: pickle
70 | pickle:
71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
72 | @echo
73 | @echo "Build finished; now you can process the pickle files."
74 |
75 | .PHONY: json
76 | json:
77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
78 | @echo
79 | @echo "Build finished; now you can process the JSON files."
80 |
81 | .PHONY: htmlhelp
82 | htmlhelp:
83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
84 | @echo
85 | @echo "Build finished; now you can run HTML Help Workshop with the" \
86 | ".hhp project file in $(BUILDDIR)/htmlhelp."
87 |
88 | .PHONY: qthelp
89 | qthelp:
90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
91 | @echo
92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/DSQ.qhcp"
95 | @echo "To view the help file:"
96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/DSQ.qhc"
97 |
98 | .PHONY: applehelp
99 | applehelp:
100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
101 | @echo
102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
103 | @echo "N.B. You won't be able to view it unless you put it in" \
104 | "~/Library/Documentation/Help or install it in your application" \
105 | "bundle."
106 |
107 | .PHONY: devhelp
108 | devhelp:
109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
110 | @echo
111 | @echo "Build finished."
112 | @echo "To view the help file:"
113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/DSQ"
114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/DSQ"
115 | @echo "# devhelp"
116 |
117 | .PHONY: epub
118 | epub:
119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
120 | @echo
121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
122 |
123 | .PHONY: epub3
124 | epub3:
125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
126 | @echo
127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
128 |
129 | .PHONY: latex
130 | latex:
131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
132 | @echo
133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
135 | "(use \`make latexpdf' here to do that automatically)."
136 |
137 | .PHONY: latexpdf
138 | latexpdf:
139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
140 | @echo "Running LaTeX files through pdflatex..."
141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
143 |
144 | .PHONY: latexpdfja
145 | latexpdfja:
146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
147 | @echo "Running LaTeX files through platex and dvipdfmx..."
148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
150 |
151 | .PHONY: text
152 | text:
153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
154 | @echo
155 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
156 |
157 | .PHONY: man
158 | man:
159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
160 | @echo
161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
162 |
163 | .PHONY: texinfo
164 | texinfo:
165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
166 | @echo
167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
168 | @echo "Run \`make' in that directory to run these through makeinfo" \
169 | "(use \`make info' here to do that automatically)."
170 |
171 | .PHONY: info
172 | info:
173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
174 | @echo "Running Texinfo files through makeinfo..."
175 | make -C $(BUILDDIR)/texinfo info
176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
177 |
178 | .PHONY: gettext
179 | gettext:
180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
181 | @echo
182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
183 |
184 | .PHONY: changes
185 | changes:
186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
187 | @echo
188 | @echo "The overview file is in $(BUILDDIR)/changes."
189 |
190 | .PHONY: linkcheck
191 | linkcheck:
192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
193 | @echo
194 | @echo "Link check complete; look for any errors in the above output " \
195 | "or in $(BUILDDIR)/linkcheck/output.txt."
196 |
197 | .PHONY: doctest
198 | doctest:
199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
200 | @echo "Testing of doctests in the sources finished, look at the " \
201 | "results in $(BUILDDIR)/doctest/output.txt."
202 |
203 | .PHONY: coverage
204 | coverage:
205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
206 | @echo "Testing of coverage in the sources finished, look at the " \
207 | "results in $(BUILDDIR)/coverage/python.txt."
208 |
209 | .PHONY: xml
210 | xml:
211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
212 | @echo
213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
214 |
215 | .PHONY: pseudoxml
216 | pseudoxml:
217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
218 | @echo
219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
220 |
221 | .PHONY: dummy
222 | dummy:
223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
224 | @echo
225 | @echo "Build finished. Dummy builder generates no files."
226 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | API
2 | ===
3 |
4 | dsq
5 | ---
6 |
7 | .. automodule:: dsq
8 | :members:
9 |
10 |
11 | dsq.manager
12 | -----------
13 |
14 | .. automodule:: dsq.manager
15 | :members:
16 |
17 | ..
18 | dsq.store
19 | ---------
20 | .. automodule:: dsq.store
21 | :members:
22 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # DSQ documentation build configuration file, created by
4 | # sphinx-quickstart on Sat Jul 9 18:21:58 2016.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # If extensions (or modules to document with autodoc) are in another directory,
16 | # add these directories to sys.path here. If the directory is relative to the
17 | # documentation root, use os.path.abspath to make it absolute, like shown here.
18 | #
19 | import os
20 | import sys
21 | sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
22 | import dsq
23 |
24 | # -- General configuration ------------------------------------------------
25 |
26 | # If your documentation needs a minimal Sphinx version, state it here.
27 | #
28 | # needs_sphinx = '1.0'
29 |
30 | # Add any Sphinx extension module names here, as strings. They can be
31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
32 | # ones.
33 | extensions = [
34 | 'sphinx.ext.autodoc',
35 | ]
36 |
37 | # Add any paths that contain templates here, relative to this directory.
38 | templates_path = ['_templates']
39 |
40 | # The suffix(es) of source filenames.
41 | # You can specify multiple suffix as a list of string:
42 | #
43 | # source_suffix = ['.rst', '.md']
44 | source_suffix = '.rst'
45 |
46 | # The encoding of source files.
47 | #
48 | # source_encoding = 'utf-8-sig'
49 |
50 | # The master toctree document.
51 | master_doc = 'index'
52 |
53 | # General information about the project.
54 | project = u'DSQ'
55 | copyright = u'2016, Anton Bobrov'
56 | author = u'Anton Bobrov'
57 |
58 | # The version info for the project you're documenting, acts as replacement for
59 | # |version| and |release|, also used in various other places throughout the
60 | # built documents.
61 | #
62 | # The short X.Y version.
63 | version = dsq.version
64 | # The full version, including alpha/beta/rc tags.
65 | release = version
66 |
67 | # The language for content autogenerated by Sphinx. Refer to documentation
68 | # for a list of supported languages.
69 | #
70 | # This is also used if you do content translation via gettext catalogs.
71 | # Usually you set "language" from the command line for these cases.
72 | language = None
73 |
74 | # There are two options for replacing |today|: either, you set today to some
75 | # non-false value, then it is used:
76 | #
77 | # today = ''
78 | #
79 | # Else, today_fmt is used as the format for a strftime call.
80 | #
81 | # today_fmt = '%B %d, %Y'
82 |
83 | # List of patterns, relative to source directory, that match files and
84 | # directories to ignore when looking for source files.
85 | # This patterns also effect to html_static_path and html_extra_path
86 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
87 |
88 | # The reST default role (used for this markup: `text`) to use for all
89 | # documents.
90 | #
91 | # default_role = None
92 |
93 | # If true, '()' will be appended to :func: etc. cross-reference text.
94 | #
95 | # add_function_parentheses = True
96 |
97 | # If true, the current module name will be prepended to all description
98 | # unit titles (such as .. function::).
99 | #
100 | # add_module_names = True
101 |
102 | # If true, sectionauthor and moduleauthor directives will be shown in the
103 | # output. They are ignored by default.
104 | #
105 | # show_authors = False
106 |
107 | # The name of the Pygments (syntax highlighting) style to use.
108 | pygments_style = 'sphinx'
109 |
110 | # A list of ignored prefixes for module index sorting.
111 | # modindex_common_prefix = []
112 |
113 | # If true, keep warnings as "system message" paragraphs in the built documents.
114 | # keep_warnings = False
115 |
116 | # If true, `todo` and `todoList` produce output, else they produce nothing.
117 | todo_include_todos = False
118 |
119 |
120 | # -- Options for HTML output ----------------------------------------------
121 |
122 | # The theme to use for HTML and HTML Help pages. See the documentation for
123 | # a list of builtin themes.
124 | #
125 | html_theme = 'alabaster'
126 |
127 | # Theme options are theme-specific and customize the look and feel of a theme
128 | # further. For a list of options available for each theme, see the
129 | # documentation.
130 | #
131 | # html_theme_options = {}
132 |
133 | # Add any paths that contain custom themes here, relative to this directory.
134 | # html_theme_path = []
135 |
136 | # The name for this set of Sphinx documents.
137 | # " v documentation" by default.
138 | #
139 | # html_title = u'DSQ v0.9'
140 |
141 | # A shorter title for the navigation bar. Default is the same as html_title.
142 | #
143 | # html_short_title = None
144 |
145 | # The name of an image file (relative to this directory) to place at the top
146 | # of the sidebar.
147 | #
148 | # html_logo = None
149 |
150 | # The name of an image file (relative to this directory) to use as a favicon of
151 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
152 | # pixels large.
153 | #
154 | # html_favicon = None
155 |
156 | # Add any paths that contain custom static files (such as style sheets) here,
157 | # relative to this directory. They are copied after the builtin static files,
158 | # so a file named "default.css" will overwrite the builtin "default.css".
159 | html_static_path = ['_static']
160 |
161 | # Add any extra paths that contain custom files (such as robots.txt or
162 | # .htaccess) here, relative to this directory. These files are copied
163 | # directly to the root of the documentation.
164 | #
165 | # html_extra_path = []
166 |
167 | # If not None, a 'Last updated on:' timestamp is inserted at every page
168 | # bottom, using the given strftime format.
169 | # The empty string is equivalent to '%b %d, %Y'.
170 | #
171 | # html_last_updated_fmt = None
172 |
173 | # If true, SmartyPants will be used to convert quotes and dashes to
174 | # typographically correct entities.
175 | #
176 | # html_use_smartypants = True
177 |
178 | # Custom sidebar templates, maps document names to template names.
179 | #
180 | # html_sidebars = {}
181 | html_sidebars = {'index': ['globaltoc.html', 'relations.html',
182 | 'sourcelink.html', 'searchbox.html']}
183 |
184 | # Additional templates that should be rendered to pages, maps page names to
185 | # template names.
186 | #
187 | # html_additional_pages = {}
188 |
189 | # If false, no module index is generated.
190 | #
191 | # html_domain_indices = True
192 |
193 | # If false, no index is generated.
194 | #
195 | # html_use_index = True
196 |
197 | # If true, the index is split into individual pages for each letter.
198 | #
199 | # html_split_index = False
200 |
201 | # If true, links to the reST sources are added to the pages.
202 | #
203 | # html_show_sourcelink = True
204 |
205 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
206 | #
207 | # html_show_sphinx = True
208 |
209 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
210 | #
211 | # html_show_copyright = True
212 |
213 | # If true, an OpenSearch description file will be output, and all pages will
214 | # contain a tag referring to it. The value of this option must be the
215 | # base URL from which the finished HTML is served.
216 | #
217 | # html_use_opensearch = ''
218 |
219 | # This is the file name suffix for HTML files (e.g. ".xhtml").
220 | # html_file_suffix = None
221 |
222 | # Language to be used for generating the HTML full-text search index.
223 | # Sphinx supports the following languages:
224 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
225 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
226 | #
227 | # html_search_language = 'en'
228 |
229 | # A dictionary with options for the search language support, empty by default.
230 | # 'ja' uses this config value.
231 | # 'zh' user can custom change `jieba` dictionary path.
232 | #
233 | # html_search_options = {'type': 'default'}
234 |
235 | # The name of a javascript file (relative to the configuration directory) that
236 | # implements a search results scorer. If empty, the default will be used.
237 | #
238 | # html_search_scorer = 'scorer.js'
239 |
240 | # Output file base name for HTML help builder.
241 | htmlhelp_basename = 'DSQdoc'
242 |
243 | # -- Options for LaTeX output ---------------------------------------------
244 |
245 | latex_elements = {
246 | # The paper size ('letterpaper' or 'a4paper').
247 | #
248 | # 'papersize': 'letterpaper',
249 |
250 | # The font size ('10pt', '11pt' or '12pt').
251 | #
252 | # 'pointsize': '10pt',
253 |
254 | # Additional stuff for the LaTeX preamble.
255 | #
256 | # 'preamble': '',
257 |
258 | # Latex figure (float) alignment
259 | #
260 | # 'figure_align': 'htbp',
261 | }
262 |
263 | # Grouping the document tree into LaTeX files. List of tuples
264 | # (source start file, target name, title,
265 | # author, documentclass [howto, manual, or own class]).
266 | latex_documents = [
267 | (master_doc, 'DSQ.tex', u'DSQ Documentation',
268 | u'Anton Bobrov', 'manual'),
269 | ]
270 |
271 | # The name of an image file (relative to this directory) to place at the top of
272 | # the title page.
273 | #
274 | # latex_logo = None
275 |
276 | # For "manual" documents, if this is true, then toplevel headings are parts,
277 | # not chapters.
278 | #
279 | # latex_use_parts = False
280 |
281 | # If true, show page references after internal links.
282 | #
283 | # latex_show_pagerefs = False
284 |
285 | # If true, show URL addresses after external links.
286 | #
287 | # latex_show_urls = False
288 |
289 | # Documents to append as an appendix to all manuals.
290 | #
291 | # latex_appendices = []
292 |
293 | # If false, no module index is generated.
294 | #
295 | # latex_domain_indices = True
296 |
297 |
298 | # -- Options for manual page output ---------------------------------------
299 |
300 | # One entry per manual page. List of tuples
301 | # (source start file, name, description, authors, manual section).
302 | man_pages = [
303 | (master_doc, 'dsq', u'DSQ Documentation',
304 | [author], 1)
305 | ]
306 |
307 | # If true, show URL addresses after external links.
308 | #
309 | # man_show_urls = False
310 |
311 |
312 | # -- Options for Texinfo output -------------------------------------------
313 |
314 | # Grouping the document tree into Texinfo files. List of tuples
315 | # (source start file, target name, title, author,
316 | # dir menu entry, description, category)
317 | texinfo_documents = [
318 | (master_doc, 'DSQ', u'DSQ Documentation',
319 | author, 'DSQ', 'One line description of project.',
320 | 'Miscellaneous'),
321 | ]
322 |
323 | # Documents to append as an appendix to all manuals.
324 | #
325 | # texinfo_appendices = []
326 |
327 | # If false, no module index is generated.
328 | #
329 | # texinfo_domain_indices = True
330 |
331 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
332 | #
333 | # texinfo_show_urls = 'footnote'
334 |
335 | # If true, do not generate a @detailmenu in the "Top" node's menu.
336 | #
337 | # texinfo_no_detailmenu = False
338 |
--------------------------------------------------------------------------------
/docs/faq.rst:
--------------------------------------------------------------------------------
1 | FAQ
2 | ===
3 |
4 | Why you don't use celery
5 | ------------------------
6 |
7 | Celery has problems with worker freezes and there is no any tools
8 | to investigate whats wrong with it. A HUGE codebase leads to numerous bugs.
9 | Redis is not primary backend and generic interface don't allow to use
10 | redis effectively. I can do better.
11 |
12 |
13 | Why you don't use RQ
14 | --------------------
15 |
16 | RQ has no delayed tasks. And it has very strange worker forking model which
17 | one should keep in mind. Also codebase is not flexible enough to add
18 | delayed task support. I can do better.
19 |
20 |
21 | Why you don't use ...
22 | ---------------------
23 |
24 | Other variants have same popularity and level of support as DSQ)
25 |
26 |
27 | .. _msgpack-only:
28 |
29 | Task arguments and result must be msgpack-friendly. Really?
30 | -----------------------------------------------------------
31 |
32 | Yep. It's an effective guard against complex objects, for example
33 | ORM instances with convoluted state. Tasks do simple things and should
34 | have simple arguments. I saw many real celery tasks which
35 | take ``user`` object and use only ``user.id`` from it. It's better
36 | to write a task wrapper with simple arguments for and use underlying api
37 | function then have fun with a pickle magic.
38 |
39 |
40 | What about JSON?
41 | ----------------
42 |
43 | JSON can't into byte strings and non-string dict keys. It is the most
44 | dumb way to serialize data.
45 |
--------------------------------------------------------------------------------
/docs/http.rst:
--------------------------------------------------------------------------------
1 | .. _http:
2 |
3 | HTTP
4 | ====
5 |
6 | To start http interface you have two options.
7 |
8 | Built-in simple http server::
9 |
10 | $ dsq http -t tasks
11 |
12 | Or use external server with ``dsq.wsgi`` app::
13 |
14 | $ DSQ_TASKS=tasks uwsgi --http-socket :9042 -w dsq.wsgi
15 |
16 | HTTP interface supports ``application/x-msgpack`` and ``application/json`` types
17 | in ``Content-Type`` and ``Accept`` headers.
18 |
19 | Example tasks.py::
20 |
21 | # tasks.py
22 | import logging
23 | import dsq
24 |
25 | logging.basicConfig(level=logging.INFO)
26 | manager = dsq.create_manager()
27 |
28 | @manager.task(queue='normal')
29 | def div(a, b):
30 | return a/b
31 |
32 |
33 | Push tasks
34 | ----------
35 |
36 | Use ``POST /push`` with body in json or msgpack with appropriate
37 | ``Content-Type``::
38 |
39 | # Request
40 | POST /push
41 | {
42 | "queue": "normal",
43 | "name": "div",
44 | "args": [10, 2],
45 | "keep_result": 600
46 | }
47 |
48 | # Response
49 | {
50 | "id": "Uy3buqMTRzOfBXdQ5v4eQA"
51 | }
52 |
53 | .. note::
54 |
55 | Body fields are the same as for :py:meth:`Manager.push `.
56 |
57 |
58 | Getting result
59 | --------------
60 |
61 | ``GET /result`` can be used to retrieve task result if ``keep_result`` was
62 | used::
63 |
64 | # Request
65 | GET /result?id=Uy3buqMTRzOfBXdQ5v4eQA
66 |
67 | # Response
68 | {
69 | "result": 5
70 | }
71 |
72 |
73 | Error result
74 | ------------
75 |
76 | Error can be returned in case of exception::
77 |
78 | # Push request
79 | POST /push
80 | {
81 | "queue": "normal",
82 | "name": "div",
83 | "args": [10, 0],
84 | "keep_result": 600
85 | }
86 |
87 | # Result request
88 | GET /result?id=0Zukd-zyTCC3qUoJ-Pf14Q
89 |
90 | # Result response
91 | {
92 | "error": "ZeroDivisionError",
93 | "message": "integer division or modulo by zero",
94 | "trace": "Traceback (most recent call last):\n File \"/home/bobrov/work/dsq/dsq/manager.py\", line 241, in process\n result = func(*args, **kwargs)\n File \"./tasks.py\", line 10, in div\n return a/b\nZeroDivisionError: integer division or modulo by zero\n"
95 | }
96 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | DSQ
2 | ===
3 |
4 | Dead simple task queue using redis. `GitHub `_.
5 |
6 | .. code:: python
7 |
8 | # tasks.py
9 | import dsq
10 | manager = dsq.create_manager()
11 |
12 | @manager.task(queue='normal')
13 | def add(a, b):
14 |        print(a + b)
15 |
16 | if __name__ == '__main__':
17 | add.push(1, 2)
18 |
19 | .. code:: bash
20 |
21 | $ python tasks.py
22 | $ dsq worker -bt tasks normal
23 |
24 | See :ref:`tutorial` for introduction.
25 |
26 |
27 | Features
28 | --------
29 |
30 | * Low latency.
31 | * :ref:`Enforcing of simple task arguments and result `.
32 | * Expiring tasks (TTL).
33 | * :ref:`delayed-tasks` (ETA).
34 | * Retries (forever or particular amount).
35 | * Dead letters.
36 | * :ref:`queue-priorities`.
37 | * Worker lifetime.
38 | * Task execution timeout.
39 | * Task forwarder from one redis instance to another.
40 | * :ref:`HTTP interface `.
41 | * Inspect tools.
42 | * Supports Python 2.7, 3.6–3.11 and PyPy.
43 | * 100% test coverage.
44 |
45 | The goal is a simple design. There is no worker manager, one can use
46 | supervisord/circus/whatever to spawn N workers.
47 | Simple storage model. Queue is a list and scheduled tasks are a sorted set.
48 | There are no task keys. Tasks are items of a list and a sorted set. There is
49 | no registry to manage workers: the basic requirements
50 | (die after some lifetime and do not hang) can be handled by the workers themselves.
51 | Workers do not store results by default.
52 |
53 |
54 | Documentation
55 | =============
56 |
57 | .. toctree::
58 | :maxdepth: 1
59 |
60 | tutorial
61 | api
62 | faq
63 | http
64 |
65 | * :ref:`genindex`
66 | * :ref:`modindex`
67 | * :ref:`search`
68 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^` where ^ is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. epub3 to make an epub3
31 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
32 | echo. text to make text files
33 | echo. man to make manual pages
34 | echo. texinfo to make Texinfo files
35 | echo. gettext to make PO message catalogs
36 | echo. changes to make an overview over all changed/added/deprecated items
37 | echo. xml to make Docutils-native XML files
38 | echo. pseudoxml to make pseudoxml-XML files for display purposes
39 | echo. linkcheck to check all external links for integrity
40 | echo. doctest to run all doctests embedded in the documentation if enabled
41 | echo. coverage to run coverage check of the documentation if enabled
42 | echo. dummy to check syntax errors of document sources
43 | goto end
44 | )
45 |
46 | if "%1" == "clean" (
47 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
48 | del /q /s %BUILDDIR%\*
49 | goto end
50 | )
51 |
52 |
53 | REM Check if sphinx-build is available and fallback to Python version if any
54 | %SPHINXBUILD% 1>NUL 2>NUL
55 | if errorlevel 9009 goto sphinx_python
56 | goto sphinx_ok
57 |
58 | :sphinx_python
59 |
60 | set SPHINXBUILD=python -m sphinx.__init__
61 | %SPHINXBUILD% 2> nul
62 | if errorlevel 9009 (
63 | echo.
64 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
65 | echo.installed, then set the SPHINXBUILD environment variable to point
66 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
67 | echo.may add the Sphinx directory to PATH.
68 | echo.
69 | echo.If you don't have Sphinx installed, grab it from
70 | echo.http://sphinx-doc.org/
71 | exit /b 1
72 | )
73 |
74 | :sphinx_ok
75 |
76 |
77 | if "%1" == "html" (
78 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
79 | if errorlevel 1 exit /b 1
80 | echo.
81 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
82 | goto end
83 | )
84 |
85 | if "%1" == "dirhtml" (
86 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
87 | if errorlevel 1 exit /b 1
88 | echo.
89 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
90 | goto end
91 | )
92 |
93 | if "%1" == "singlehtml" (
94 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
95 | if errorlevel 1 exit /b 1
96 | echo.
97 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
98 | goto end
99 | )
100 |
101 | if "%1" == "pickle" (
102 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
103 | if errorlevel 1 exit /b 1
104 | echo.
105 | echo.Build finished; now you can process the pickle files.
106 | goto end
107 | )
108 |
109 | if "%1" == "json" (
110 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
111 | if errorlevel 1 exit /b 1
112 | echo.
113 | echo.Build finished; now you can process the JSON files.
114 | goto end
115 | )
116 |
117 | if "%1" == "htmlhelp" (
118 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
119 | if errorlevel 1 exit /b 1
120 | echo.
121 | echo.Build finished; now you can run HTML Help Workshop with the ^
122 | .hhp project file in %BUILDDIR%/htmlhelp.
123 | goto end
124 | )
125 |
126 | if "%1" == "qthelp" (
127 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
128 | if errorlevel 1 exit /b 1
129 | echo.
130 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
131 | .qhcp project file in %BUILDDIR%/qthelp, like this:
132 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\DSQ.qhcp
133 | echo.To view the help file:
134 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\DSQ.ghc
135 | goto end
136 | )
137 |
138 | if "%1" == "devhelp" (
139 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
140 | if errorlevel 1 exit /b 1
141 | echo.
142 | echo.Build finished.
143 | goto end
144 | )
145 |
146 | if "%1" == "epub" (
147 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
148 | if errorlevel 1 exit /b 1
149 | echo.
150 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
151 | goto end
152 | )
153 |
154 | if "%1" == "epub3" (
155 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
156 | if errorlevel 1 exit /b 1
157 | echo.
158 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
159 | goto end
160 | )
161 |
162 | if "%1" == "latex" (
163 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
164 | if errorlevel 1 exit /b 1
165 | echo.
166 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
167 | goto end
168 | )
169 |
170 | if "%1" == "latexpdf" (
171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
172 | cd %BUILDDIR%/latex
173 | make all-pdf
174 | cd %~dp0
175 | echo.
176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
177 | goto end
178 | )
179 |
180 | if "%1" == "latexpdfja" (
181 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
182 | cd %BUILDDIR%/latex
183 | make all-pdf-ja
184 | cd %~dp0
185 | echo.
186 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
187 | goto end
188 | )
189 |
190 | if "%1" == "text" (
191 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
192 | if errorlevel 1 exit /b 1
193 | echo.
194 | echo.Build finished. The text files are in %BUILDDIR%/text.
195 | goto end
196 | )
197 |
198 | if "%1" == "man" (
199 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
200 | if errorlevel 1 exit /b 1
201 | echo.
202 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
203 | goto end
204 | )
205 |
206 | if "%1" == "texinfo" (
207 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
208 | if errorlevel 1 exit /b 1
209 | echo.
210 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
211 | goto end
212 | )
213 |
214 | if "%1" == "gettext" (
215 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
216 | if errorlevel 1 exit /b 1
217 | echo.
218 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
219 | goto end
220 | )
221 |
222 | if "%1" == "changes" (
223 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
224 | if errorlevel 1 exit /b 1
225 | echo.
226 | echo.The overview file is in %BUILDDIR%/changes.
227 | goto end
228 | )
229 |
230 | if "%1" == "linkcheck" (
231 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
232 | if errorlevel 1 exit /b 1
233 | echo.
234 | echo.Link check complete; look for any errors in the above output ^
235 | or in %BUILDDIR%/linkcheck/output.txt.
236 | goto end
237 | )
238 |
239 | if "%1" == "doctest" (
240 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
241 | if errorlevel 1 exit /b 1
242 | echo.
243 | echo.Testing of doctests in the sources finished, look at the ^
244 | results in %BUILDDIR%/doctest/output.txt.
245 | goto end
246 | )
247 |
248 | if "%1" == "coverage" (
249 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
250 | if errorlevel 1 exit /b 1
251 | echo.
252 | echo.Testing of coverage in the sources finished, look at the ^
253 | results in %BUILDDIR%/coverage/python.txt.
254 | goto end
255 | )
256 |
257 | if "%1" == "xml" (
258 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
259 | if errorlevel 1 exit /b 1
260 | echo.
261 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
262 | goto end
263 | )
264 |
265 | if "%1" == "pseudoxml" (
266 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
267 | if errorlevel 1 exit /b 1
268 | echo.
269 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
270 | goto end
271 | )
272 |
273 | if "%1" == "dummy" (
274 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
275 | if errorlevel 1 exit /b 1
276 | echo.
277 | echo.Build finished. Dummy builder generates no files.
278 | goto end
279 | )
280 |
281 | :end
282 |
--------------------------------------------------------------------------------
/docs/tutorial.rst:
--------------------------------------------------------------------------------
1 | .. _tutorial:
2 |
3 | Tutorial
4 | ========
5 |
6 | Install
7 | -------
8 |
9 | ::
10 |
11 | $ pip install dsq
12 |
13 |
14 | Also you need to start redis.
15 |
16 |
17 | Register and push task
18 | ----------------------
19 |
20 | A task is a user-defined function whose actual execution can be
21 | postponed by pushing its name and arguments into some queue.
22 | One can have multiple queues. Queues are created on the fly.
23 |
24 | ::
25 |
26 | # tasks.py
27 | import sys
28 | import logging
29 | import dsq
30 |
31 | # dsq does not init any logger by itself
32 | # and one must do it explicitly
33 | logging.basicConfig(level=logging.INFO)
34 |
35 | # using 127.0.0.1:6379/0 redis by default
36 | manager = dsq.create_manager()
37 |
38 | def task(value):
39 |        print(value)
40 |
41 | # tasks should be registered so workers can execute them
42 | manager.register('my-task', task)
43 |
44 | if __name__ == '__main__':
45 | # put my-task into normal queue
46 | manager.push('normal', 'my-task', args=[sys.argv[1]])
47 |
48 | Now run push by executing::
49 |
50 | $ python tasks.py Hello
51 | $ python tasks.py World
52 |
53 | You can see queue size via ``stat`` command::
54 |
55 | $ dsq stat -t tasks
56 | normal 2
57 | schedule 0
58 |
59 | Start worker for ``normal`` queue::
60 |
61 | $ dsq worker -b -t tasks normal
62 | INFO:dsq.worker:Executing task(Hello)#CLCKs0nNRQqC4TKVkwDFRw
63 | Hello
64 | INFO:dsq.worker:Executing task(World)#LjCRG7yiQIqVKms-QfhmGg
65 | World
66 |
67 | ``-b`` stops worker after queue is empty.
68 |
69 |
70 | Task decorator
71 | --------------
72 |
73 | There is a shortcut to register tasks and push them via
74 | :py:meth:`dsq.manager.Manager.task` decorator::
75 |
76 | # tasks.py
77 | import sys
78 | import logging
79 | import dsq
80 |
81 | logging.basicConfig(level=logging.INFO)
82 | manager = dsq.create_manager()
83 |
84 | @manager.task(queue='normal')
85 | def task(value):
86 |        print(value)
87 |
88 | if __name__ == '__main__':
89 | task.push(sys.argv[1])
90 |
91 |
92 | .. _queue-priorities:
93 |
94 | Queue priorities
95 | ----------------
96 |
97 | Worker queue list is prioritized. It processes tasks from a first queue, then
98 | from a second if first is empty and so on::
99 |
100 | # tasks.py
101 | import logging
102 | import dsq
103 |
104 | logging.basicConfig(level=logging.INFO)
105 | manager = dsq.create_manager()
106 |
107 | @manager.task(queue='high')
108 | def high(value):
109 | print 'urgent', value
110 |
111 | @manager.task(queue='normal')
112 | def normal(value):
113 | print 'normal', value
114 |
115 | if __name__ == '__main__':
116 | normal.push(1)
117 | normal.push(2)
118 | normal.push(3)
119 | high.push(4)
120 | normal.push(5)
121 | high.push(6)
122 |
123 | And processing::
124 |
125 | $ python tasks.py
126 | $ dsq stat -t tasks
127 | high 2
128 | normal 4
129 | schedule 0
130 | $ dsq worker -bt tasks high normal
131 | INFO:dsq.worker:Executing high(4)#w9RKVQ4oQoO9ivB8q198QA
132 | urgent 4
133 | INFO:dsq.worker:Executing high(6)#SEss1H0QQB2TAqLQjbBpmw
134 | urgent 6
135 | INFO:dsq.worker:Executing normal(1)#NY-e_Nu3QT-4zCDU9LvIvA
136 | normal 1
137 | INFO:dsq.worker:Executing normal(2)#yy44h7tcToe5yyTSUJ7dLw
138 | normal 2
139 | INFO:dsq.worker:Executing normal(3)#Hx3iau2MRW2xwwOFNinJIg
140 | normal 3
141 | INFO:dsq.worker:Executing normal(5)#DTDpF9xkSkaChwFURRCzDQ
142 | normal 5
143 |
144 |
145 | .. _delayed-tasks:
146 |
147 | Delayed tasks
148 | -------------
149 |
150 | You can use ``eta`` or ``delay`` parameter to postpone task::
151 |
152 | # tasks.py
153 | import sys
154 | import logging
155 | import dsq
156 |
157 | logging.basicConfig(level=logging.INFO)
158 | manager = dsq.create_manager()
159 |
160 | @manager.task(queue='normal')
161 | def task(value):
162 |        print(value)
163 |
164 | if __name__ == '__main__':
165 | task.modify(delay=30).push(sys.argv[1])
166 |
167 | You should use ``scheduler`` command to queue such tasks::
168 |
169 | $ python tasks.py boo
170 | $ python tasks.py foo
171 | $ date
172 | Sun Jul 17 13:41:10 MSK 2016
173 | $ dsq stat -t tasks
174 | schedule 2
175 | $ dsq schedule -t tasks
176 | 2016-07-17 13:41:32 normal {"args": ["boo"], "id": "qWbsEnu2SRyjwIXga35yqA", "name": "task"}
177 | 2016-07-17 13:41:34 normal {"args": ["foo"], "id": "xVm3OyWjQB2XDiskTsCN4w", "name": "task"}
178 |
179 | # next command waits until all tasks will be scheduled
180 | $ dsq scheduler -bt tasks
181 | $ dsq stat -t tasks
182 | normal 2
183 | schedule 0
184 | $ dsq queue -t tasks
185 | {"args": ["boo"], "id": "qWbsEnu2SRyjwIXga35yqA", "name": "task"}
186 | {"args": ["foo"], "id": "xVm3OyWjQB2XDiskTsCN4w", "name": "task"}
187 | $ dsq worker -bt tasks normal
188 | INFO:dsq.worker:Executing task(boo)#qWbsEnu2SRyjwIXga35yqA
189 | boo
190 | INFO:dsq.worker:Executing task(foo)#xVm3OyWjQB2XDiskTsCN4w
191 | foo
192 |
193 | .. note::
194 |
195 | In production you need to start N workers and one scheduler to be able to
196 | process delayed tasks.
197 |
198 |
199 | Task result
200 | -----------
201 |
202 | Provide the ``keep_result`` parameter to be able to fetch the task result later::
203 |
204 | # tasks.py
205 | import sys
206 | import logging
207 | import dsq
208 |
209 | logging.basicConfig(level=logging.INFO)
210 | manager = dsq.create_manager()
211 |
212 | @manager.task(queue='normal', keep_result=600)
213 | def div(a, b):
214 | return a/b
215 |
216 | if __name__ == '__main__':
217 | result = div.push(int(sys.argv[1]), int(sys.argv[2]))
218 | if result.ready(5):
219 | if result.error:
220 | print result.error, result.error_message
221 | else:
222 | print 'Result is: ', result.value
223 | else:
224 | print 'Result is not ready'
225 |
226 | Process::
227 |
228 | # start worker in background
229 | $ dsq worker -t tasks normal &
230 | [1] 6419
231 | $ python tasks.py 10 2
232 | INFO:dsq.worker:Executing div(10, 2)#6S_UlsECSxSddtluBLB6yQ
233 | Result is: 5
234 | $ python tasks.py 10 0
235 | INFO:dsq.worker:Executing div(10, 0)#_WQxcUDYQH6ZtqfSe1-0-Q
236 | ERROR:dsq.manager:Error during processing task div(10, 0)#_WQxcUDYQH6ZtqfSe1-0-Q
237 | Traceback (most recent call last):
238 | File "/home/bobrov/work/dsq/dsq/manager.py", line 242, in process
239 | result = func(*args, **kwargs)
240 | File "./tasks.py", line 11, in div
241 | return a/b
242 | ZeroDivisionError: integer division or modulo by zero
243 | ZeroDivisionError integer division or modulo by zero
244 | # kill worker
245 | $ kill %1
246 | [1]+ Done dsq worker -t tasks normal
247 | $ python tasks.py 10 1
248 | Result is not ready
249 |
--------------------------------------------------------------------------------
/dsq/__init__.py:
--------------------------------------------------------------------------------
version = '0.9'  # package version string
_is_main = False  # set to True by the ``dsq`` CLI entry point (see cli.cli)
3 |
4 |
def create_manager(queue=None, result=None, sync=False,
                   unknown=None, default_queue=None):  # pragma: no cover
    '''Helper to create dsq manager

    :param queue: Redis url for queue store. [redis://]host[:port]/dbnum.
    :param result: Redis url for result store. By default it is the
                   same as queue. [redis://]host[:port]/dbnum.
    :returns: :py:class:`~.manager.Manager`

    ``sync``, ``unknown`` and ``default_queue`` params are the same as for
    :py:class:`~.manager.Manager` constructor.
    ::

        manager = create_manager()

        @manager.task(queue='high', keep_result=600)
        def add(a, b):
            return a + b
    '''
    # Fix: the docstring used to sit *after* these imports, so Python did not
    # treat it as the function's docstring (help()/Sphinx saw nothing).
    # Imports stay local so that ``import dsq`` remains cheap.
    from .manager import Manager
    from .store import QueueStore, ResultStore
    from .utils import redis_client

    return Manager(QueueStore(redis_client(queue)),
                   ResultStore(redis_client(result or queue)),
                   sync=sync, unknown=unknown, default_queue=default_queue)
30 |
31 |
def is_main():  # pragma: no cover
    '''True while the ``dsq`` command-line tool is driving the process.

    Handy in a tasks module that imports other task modules, to avoid
    recursive imports::

        #tasks.py
        import dsq

        if dsq.is_main():
            import sms_tasks
            import billing_tasks
    '''
    return _is_main
46 |
--------------------------------------------------------------------------------
/dsq/cli.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import sys
4 | import time
5 | import logging
6 | import click
7 | import json
8 |
9 | from datetime import datetime
10 |
11 |
@click.group()
def cli():
    # NOTE: no docstring here — click would surface it as the group's
    # --help text, changing CLI output.
    # Make task modules importable from the current working directory
    # (so `-t tasks` finds ./tasks.py).
    if '.' not in sys.path:
        sys.path.insert(0, '.')
    import dsq
    dsq._is_main = True  # lets task modules detect CLI context via dsq.is_main()
18 |
19 |
# Shared --tasks option help text, reused by every subcommand below.
tasks_help = ('Task module. By default dsq searches `manager` '
              'variable in it. But one can provide custom var via '
              'package.module:varname syntax.')
23 |
24 |
@cli.command()
@click.option('-t', '--tasks', required=True, help=tasks_help)
@click.option('--lifetime', type=int, help='Max worker lifetime.')
@click.option('--task-timeout', type=int, help='Kill task after this period of time.')
@click.option('-b', '--burst', is_flag=True, help='Stop worker after all queue is empty.')
@click.argument('queue', nargs=-1, required=True)
def worker(tasks, lifetime, task_timeout, burst, queue):
    '''Task executor.

    QUEUE is a prioritized queue list. Worker will take tasks from the first queue
    then from the second if first is empty and so on. For example:

        dsq worker -t tasks high normal low

    Allows to handle tasks from `high` queue first.
    '''
    from .utils import load_manager
    from .worker import Worker
    # Local name `w` avoids shadowing this command function.
    w = Worker(load_manager(tasks), lifetime=lifetime, task_timeout=task_timeout)
    w.process(queue, burst)
46 |
47 |
@cli.command()
@click.option('-t', '--tasks', required=True, help=tasks_help)
@click.option('-b', '--burst', is_flag=True, help='Stop scheduler after queue is empty.')
def scheduler(tasks, burst):
    '''Schedule delayed tasks into execution queues.'''
    from .utils import RunFlag, load_manager
    manager = load_manager(tasks)
    # RunFlag goes falsy on shutdown — presumably cleared by TERM/INT
    # handlers (see .utils.RunFlag) so loops below exit cleanly.
    run = RunFlag()
    if burst:
        # Burst mode: move due tasks to their queues until nothing is left,
        # then exit.
        while run:
            if not manager.queue.reschedule():
                break
            time.sleep(1)
    else:
        # Long-running mode: one timer multiplexes two periodic jobs —
        # rescheduling due tasks every second and checking crontab entries
        # every minute.
        now = time.time()
        timer = manager.periodic.timer(now)
        timer.add(manager.queue.reschedule, now, 1)
        timer.add('check-crontab', now, 60)
        crontab_check = manager.crontab.checker()
        for next_run, action in timer:
            now = time.time()
            if next_run > now:
                # Sleep until the next scheduled action is due.
                time.sleep(next_run - now)

            if not run:
                break

            # 'check-crontab' is a marker string; every other action is a
            # callable added to the timer above.
            if action == 'check-crontab':
                crontab_check(next_run)
            else:
                action()
79 |
80 |
@cli.command()
@click.option('-t', '--tasks', help=tasks_help)
@click.option('-i', '--interval', type=float, default=1)
@click.option('-b', '--batch-size', type=int, default=5000)
@click.option('-s', '--source')
@click.argument('dest')
def forwarder(tasks, interval, batch_size, source, dest):
    '''Forward items from one storage to another.'''
    from .utils import RunFlag, load_manager, redis_client
    from .store import QueueStore
    log = logging.getLogger('dsq.forwarder')

    # The source store may come from an explicit redis url (--source) or
    # from the tasks module's manager (--tasks); one of them is required.
    if not tasks and not source:
        print('--tasks or --source must be provided')
        sys.exit(1)

    s = QueueStore(redis_client(source)) if source else load_manager(tasks).queue
    d = QueueStore(redis_client(dest))
    run = RunFlag()  # goes falsy on shutdown so the loop exits cleanly
    while run:
        batch = s.take_many(batch_size)
        if batch['schedule'] or batch['queues']:
            try:
                d.put_many(batch)
            except Exception:
                # Destination write failed: put the batch back into the
                # source so no tasks are lost, then re-raise to stop.
                s.put_many(batch)
                log.exception('Forward error')
                raise
        else:
            # Nothing to forward; avoid busy-looping.
            time.sleep(interval)
111 |
112 |
@cli.command()
@click.option('-t', '--tasks', required=True, help=tasks_help)
@click.option('-b', '--bind', help='Listen on [host]:port', default='127.0.0.1:9042')
def http(tasks, bind):
    """Http interface using built-in simple wsgi server"""
    from wsgiref.simple_server import make_server
    from .utils import load_manager
    from .http import Application

    # "host:port" or just ":port"; empty host means all interfaces.
    host, _, port = bind.partition(':')
    application = Application(load_manager(tasks))
    server = make_server(host, int(port), application)
    print('Listen on {}:{} ...'.format(host or '0.0.0.0', port), file=sys.stderr)
    server.serve_forever()
127 |
128 |
@cli.command('queue')
@click.option('-t', '--tasks', required=True, help=tasks_help)
@click.argument('queue', nargs=-1)
def dump_queue(tasks, queue):
    """Dump queue content"""
    from .utils import load_manager
    manager = load_manager(tasks)
    # No queue arguments means "dump every known queue".
    queues = queue or manager.queue.queue_list()

    batch = 5000
    for name in queues:
        # Page through the queue in fixed-size chunks.
        start = 0
        items = manager.queue.get_queue(name, start, batch)
        while items:
            for item in items:
                print(json.dumps(item, ensure_ascii=False, sort_keys=True))
            start += batch
            items = manager.queue.get_queue(name, start, batch)
151 |
152 |
@cli.command('schedule')
@click.option('-t', '--tasks', required=True, help=tasks_help)
def dump_schedule(tasks):
    """Dump schedule content"""
    from .utils import load_manager
    manager = load_manager(tasks)

    # Page through the schedule in fixed-size chunks.
    batch = 5000
    start = 0
    chunk = manager.queue.get_schedule(start, batch)
    while chunk:
        for ts, queue, item in chunk:
            # One tab-separated row: fire time, target queue, task payload.
            print(datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'),
                  queue,
                  json.dumps(item, ensure_ascii=False, sort_keys=True),
                  sep='\t')
        start += batch
        chunk = manager.queue.get_schedule(start, batch)
174 |
175 |
@cli.command('queue-list')
@click.option('-t', '--tasks', required=True, help=tasks_help)
def queue_list(tasks):
    """Print non empty queues"""
    from .utils import load_manager
    for name in load_manager(tasks).queue.queue_list():
        print(name)
184 |
185 |
@cli.command('stat')
@click.option('-t', '--tasks', required=True, help=tasks_help)
def stat(tasks):
    """Print queue and schedule sizes"""
    from .utils import load_manager
    sizes = load_manager(tasks).queue.stat()
    # One tab-separated row per queue, sorted by queue name.
    for name in sorted(sizes):
        print(name, sizes[name], sep='\t')
194 |
--------------------------------------------------------------------------------
/dsq/compat.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
PY2 = sys.version_info[0] == 2  # True when running under Python 2.x

if PY2: # pragma: no cover
    # Python 2: alias py3-style names onto their py2 equivalents.
    import __builtin__ as builtins
    import urlparse
    range = builtins.xrange
    reduce = builtins.reduce
    string_types = (str, unicode)

    # Lazy dict-view helpers map onto py2's iter* methods; list* always
    # return real lists.
    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()
    listkeys = lambda d: d.keys()
    listvalues = lambda d: d.values()
    listitems = lambda d: d.items()

    def bytestr(data, encoding='utf-8'):
        # Coerce text to bytes; byte strings pass through untouched.
        if isinstance(data, unicode):
            data = data.encode(encoding)

        return data
else: # pragma: no cover
    # Python 3: builtins already behave the py3 way; provide uniform aliases
    # so the rest of the package can use one spelling.
    import builtins
    from functools import reduce
    from urllib import parse as urlparse
    range = builtins.range
    string_types = (str, )

    iterkeys = lambda d: d.keys()
    itervalues = lambda d: d.values()
    iteritems = lambda d: d.items()
    listkeys = lambda d: list(d.keys())
    listvalues = lambda d: list(d.values())
    listitems = lambda d: list(d.items())

    def bytestr(data, encoding='utf-8'):
        # Coerce text to bytes; byte strings pass through untouched.
        if isinstance(data, str):
            data = data.encode(encoding)

        return data
--------------------------------------------------------------------------------
/dsq/http.py:
--------------------------------------------------------------------------------
1 | import json
2 | import msgpack
3 | import logging
4 | import codecs
5 |
6 | from .compat import bytestr, PY2, urlparse
7 |
log = logging.getLogger('dsq.http')

# NOTE(review): utf8_reader appears unused within this module — confirm
# whether external code relies on it before removing.
utf8_reader = codecs.getreader('utf-8')
11 |
12 |
class Response(object):
    '''Minimal WSGI response: an iterable body plus status and content type.'''

    def __init__(self, body, status, content_type):
        self.body = body
        self.status = status
        self.content_type = content_type

    def __call__(self, environ, start_response):
        # Standard WSGI handshake: report status/headers, return the iterable.
        start_response(self.status, [('Content-type', self.content_type)])
        return self

    def __iter__(self):
        # WSGI requires bytes chunks; encode lazily on iteration.
        yield bytestr(self.body)
26 |
27 |
class Error(object):
    '''Value object describing an HTTP error reply.

    Holds the status line, a short machine-readable error code and a
    human-readable message; the application turns it into a JSON/msgpack body.
    '''

    def __init__(self, status, error, message):
        self.status = status
        self.error = error
        self.message = message
33 |
34 |
class Application(object):
    '''Tiny WSGI app exposing the manager's push/result over HTTP.

    Routes:
      POST /push    -- enqueue a task (json or msgpack body)
      GET  /result  -- fetch a stored task result by id
    '''
    def __init__(self, manager):
        self.manager = manager

    def push(self, environ):
        '''Parse the request body and enqueue a task.

        Returns ``{'id': ...}`` on success or an :class:`Error`.
        '''
        ct = environ.get('CONTENT_TYPE')
        stream = environ['wsgi.input']
        content = stream.read(int(environ['CONTENT_LENGTH']))
        if ct == 'application/json':
            try:
                task = json.loads(content if PY2 else content.decode('utf-8'))
            except Exception:
                # Was a bare ``except:`` which also swallowed
                # SystemExit/KeyboardInterrupt.
                return Error('400 BAD REQUEST', 'invalid-encoding', 'Can\'t decode body')
        elif ct == 'application/x-msgpack':
            try:
                # ``encoding='utf-8'`` was removed in msgpack>=1.0; ``raw=False``
                # is the supported equivalent (available since msgpack 0.5).
                task = msgpack.loads(content, raw=False)
            except Exception:
                return Error('400 BAD REQUEST', 'invalid-encoding', 'Can\'t decode body')
        else:
            return Error('400 BAD REQUEST', 'invalid-content-type',
                         'Content must be json or msgpack')

        if not task.get('queue'):
            return Error('400 BAD REQUEST', 'bad-params', 'queue required')

        if not task.get('name'):
            return Error('400 BAD REQUEST', 'bad-params', 'name required')

        return {'id': self.manager.push(**task).id}

    def result(self, environ):
        '''Return the stored result for ``?id=...`` or an :class:`Error`.'''
        qs = urlparse.parse_qs(environ.get('QUERY_STRING'))
        tid = qs.get('id')
        if not tid:
            return Error('400 BAD REQUEST', 'bad-params', 'id required')

        return self.manager.result.get(tid[0])

    def __call__(self, environ, start_response):
        url = environ['PATH_INFO'].rstrip('/')
        method = environ['REQUEST_METHOD']
        try:
            if method == 'POST' and url == '/push':
                result = self.push(environ)
            elif method in ('GET', 'HEAD') and url == '/result':
                result = self.result(environ)
            else:
                result = Error('404 NOT FOUND', 'not-found', 'Not found')
        except Exception as e: # pragma: no cover
            log.exception('Unhandled exception')
            # ``e.message`` does not exist on Python 3 exceptions; str(e)
            # works on both 2 and 3.
            result = Error('500 SERVER ERROR', 'internal-error', str(e))

        status = '200 OK'
        if isinstance(result, Error):
            status = result.status
            result = {'error': result.error, 'message': result.message}

        if not isinstance(result, Response):
            # Content negotiation: msgpack when the client accepts it, else JSON.
            if 'application/x-msgpack' in environ.get('HTTP_ACCEPT', ''):
                result = Response(msgpack.dumps(result, use_bin_type=True),
                                  status, 'application/x-msgpack')
            else:
                result = Response(json.dumps(result), status, 'application/json; charset=UTF-8')

        return result(environ, start_response)
100 |
--------------------------------------------------------------------------------
/dsq/manager.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import sys
4 | import logging
5 | import traceback
6 | from time import time, sleep
7 |
8 | from .utils import make_id, task_fmt, safe_call
9 | from .worker import StopWorker
10 | from .sched import Timer, Crontab
11 | from .compat import PY2
12 |
13 | log = logging.getLogger(__name__)
14 |
15 |
def make_task(name, **kwargs):
    '''Build a task dict with a fresh id, dropping None-valued fields.'''
    kwargs['id'] = make_id()
    kwargs['name'] = name
    return {key: value for key, value in kwargs.items() if value is not None}
20 |
21 |
class Task(object):
    '''Callable wrapper binding a task function to a manager and push params.'''

    def __init__(self, manager, func, **params):
        self.manager = manager
        self.func = func
        self.params = params

    def __call__(self, *args, **kwargs):
        # Calling the wrapper runs the function locally, bypassing the queue.
        return self.func(*args, **kwargs)

    def push(self, *args, **kwargs):
        # Empty args/kwargs become None so they are omitted from the task dict.
        return self.manager.push(args=args or None, kwargs=kwargs or None, **self.params)

    def modify(self, **params):
        # Return a copy with merged push params; the original stays untouched.
        merged = dict(self.params, **params)
        return Task(self.manager, self.func, **merged)
38 |
39 |
class Context(object):
    '''Per-invocation context handed to tasks registered with ``with_context``.'''

    def __init__(self, manager, task, state=None):
        self.manager = manager
        self.task = task
        self.state = state

    def set_result(self, *args, **kwargs):
        # Delegate to the manager, pre-binding the current task.
        self.manager.set_result(self.task, *args, **kwargs)
48 |
49 |
class EMPTY: pass  # sentinel: distinguishes "no value yet" from a stored None


class Result(object):
    '''Handle to a (possibly pending) task result.

    ``ready`` polls the result store until the result appears or the timeout
    elapses; once ready the instance exposes either ``value`` or the
    ``error``/``error_message``/``error_trace`` triple.
    '''

    def __init__(self, manager, id, value=EMPTY):
        self.manager = manager
        self.id = id
        self.error = None
        self._ready = value is not EMPTY
        if self._ready:
            self.value = value

    def _fetch(self, timeout, interval):
        # Poll the store until a payload shows up or the deadline passes.
        deadline = time() + (timeout or 0)
        while True:
            payload = self.manager.result.get(self.id)
            if payload is not None:
                return payload
            if time() > deadline:
                return None
            sleep(interval)

    def ready(self, timeout=None, interval=1.0):
        '''Return self when the result is available, otherwise None.'''
        if self._ready:
            return self

        payload = self._fetch(timeout, interval=interval)
        if payload is None:
            return None

        if 'error' in payload:
            self.error = payload['error']
            self.error_message = payload['message']
            self.error_trace = payload['trace']
        else:
            self.value = payload['result']
        self._ready = True
        return self
89 |
90 |
class Manager(object):
    """DSQ manager

    Allows to register task functions, push tasks and get task results

    :param queue: :py:class:`~.store.QueueStore` to use for tasks.
    :param result: :py:class:`~.store.ResultStore` to use for task results.
    :param sync: Synchronous operation. Task will be executed immediately during
                 :py:meth:`push` call.
    :param unknown: Name of unknown queue for tasks for which there is no registered functions.
                    Default is 'unknown'.
    :param default_queue: Name of default queue. Default is 'dsq'.
    """
    def __init__(self, queue, result=None, sync=False, unknown=None, default_queue=None):
        self.queue = queue
        self.result = result
        self.sync = sync
        # name -> (func, with_context, init_state), filled by register().
        self.registry = {}
        # name -> lazily created task state, see get_state()/close().
        self.states = {}
        self.unknown = unknown or 'unknown'
        self.default_queue = default_queue or 'dsq'
        # Seconds between retry attempts when a task carries no retry_delay.
        self.default_retry_delay = 60
        self.crontab = CrontabCollector()
        self.periodic = PeriodicCollector()

    def get_state(self, name, init):
        # Return cached state for task `name`, creating it with init()
        # on first access.
        try:
            return self.states[name]
        except KeyError:
            pass

        result = self.states[name] = init()
        return result

    def close(self):
        # Give task states holding resources (connections etc.) a chance
        # to release them.
        for s in self.states.values():
            if hasattr(s, 'close'):
                s.close()

    def task(self, name=None, queue=None, with_context=False, init_state=None, **kwargs):
        r"""Task decorator

        Function wrapper to register task in manager and provide simple interface to calling it.

        :param name: Task name, dsq will use func.__name__ if not provided.
        :param queue: Queue name to use.
        :param with_context: Provide task context as first task argument.
        :param init_state: Task state initializer.
        :param \*\*kwargs: Rest params as for :py:meth:`push`.

        ::

            @manager.task
            def task1(arg):
                long_running_func(arg)

            @manager.task(name='custom-name', queue='low', with_context=True)
            def task2(ctx, arg):
                print(ctx.task['id'])
                return long_running_func(arg)

            task1.push('boo') # push task to queue
            task2.modify(keep_result=300).push('foo') # push task with keep_result option.
            task1('boo') # direct call of task1.
        """
        def decorator(func):
            fname = tname or func.__name__
            self.register(fname, func, with_context, init_state)
            return Task(self, func, queue=queue or self.default_queue, name=fname, **kwargs)

        # Bare usage (@manager.task) passes the function itself as `name`;
        # parameterized usage (@manager.task(...)) returns the decorator.
        if callable(name):
            tname = None
            return decorator(name)

        tname = name
        return decorator

    def register(self, name, func, with_context=False, init_state=None):
        """Register task

        :param name: Task name.
        :param func: Function.
        :param with_context: Provide task context as first task argument.
        :param init_state: Task state initializer.

        ::

            def add(a, b):
                return a + b

            manager.register('add', add)
            manager.push('normal', 'add', (1, 2), keep_result=300)
        """
        # Stateful tasks always receive a context, hence `or init_state`.
        self.registry[name] = (func, with_context or init_state, init_state)

    def push(self, queue, name, args=None, kwargs=None, meta=None, ttl=None,
             eta=None, delay=None, dead=None, retry=None, retry_delay=None,
             timeout=None, keep_result=None):
        """Add task into queue

        :param queue: Queue name.
        :param name: Task name.
        :param args: Task args.
        :param kwargs: Task kwargs.
        :param meta: Task additional info.
        :param ttl: Task time to live.
        :param eta: Schedule task execution for particular unix timestamp.
        :param delay: Postpone task execution for particular amount of seconds.
        :param dead: Name of dead-letter queue.
        :param retry: Retry task execution after exception. True - forever,
                      number - retry this amount.
        :param retry_delay: Delay between retry attempts.
        :param timeout: Task execution timeout.
        :param keep_result: Keep task return value for this amount of seconds.
                            Result is ignored by default.
        :returns: :py:class:`Result` handle for the pushed task.
        """
        # Sync mode short-circuits the queue and runs the task inline.
        if self.sync:
            task = make_task(name=name, args=args, kwargs=kwargs, meta=meta)
            result = self.process(task)
            return Result(self, task['id'], result)

        if delay:
            eta = time() + delay

        task = make_task(name=name, args=args, kwargs=kwargs, meta=meta,
                         expire=ttl and (time() + ttl), dead=dead, retry=retry,
                         retry_delay=retry_delay, timeout=timeout,
                         keep_result=keep_result)
        self.queue.push(queue, task, eta=eta)
        # make_id() yields bytes; normalize to text on py3 for the caller.
        task_id = task['id'] if PY2 else task['id'].decode()
        return Result(self, task_id)

    def pop(self, queue_list, timeout=None):
        """Pop item from the first not empty queue in ``queue_list``

        :param queue_list: List of queue names.
        :param timeout: Wait item for this amount of seconds (integer).
                        By default blocks forever.
        :returns: Task dict annotated with its source queue, or None
                  on timeout.

        ::

            item = manager.pop(['high', 'normal'], 1)
            if item:
                manager.process(item)
        """
        queue, task = self.queue.pop(queue_list, timeout)
        if task:
            # Remember the source queue so retries go back to it.
            task['queue'] = queue
            return task

    def process(self, task, now=None, log_exc=True):
        """Process task item

        :param task: Task.
        :param now: Unix timestamp to compare with ``task.expire`` time and set ``eta`` on retry.
        :param log_exc: Log any exception during task execution. ``True`` by default.
        :returns: The task function's return value, or None if the task
                  expired, is unknown, or raised.
        """
        expire = task.get('expire')
        tname = task['name']
        # Silently drop tasks that outlived their ttl.
        if expire is not None and (now or time()) > expire:
            return

        try:
            func, with_context, init_state = self.registry[tname]
        except KeyError:
            if self.sync:
                raise
            # Park unregistered tasks in the "unknown" queue for later
            # inspection instead of losing them.
            self.queue.push(self.unknown, task)
            log.error('Function for task "%s" not found', tname)
            return

        args = task.get('args', ())
        kwargs = task.get('kwargs', {})
        log.info('Executing %s', task_fmt(task))
        try:
            if with_context:
                ctx = Context(self, task, init_state and self.get_state(tname, init_state))
                result = func(ctx, *args, **kwargs)
            else:
                result = func(*args, **kwargs)

            # Stateful tasks report results themselves via ctx.set_result().
            if not init_state:
                self.set_result(task, result, now=now)
            return result
        except StopWorker:
            # Worker shutdown/timeout must propagate to the worker loop.
            raise
        except Exception:
            self.set_result(task, exc_info=True, log_exc=log_exc, now=now)

    def set_result(self, task, result=None, exc_info=None, now=None, log_exc=True):
        """Set result for task item

        :param task: Task.
        :param result: Result value.
        :param exc_info: Set exception info, retrieve it via sys.exc_info() if True.
        :param now: Unix timestamp to set ``eta`` on retry.
        :param log_exc: Log exc_info if any. ``True`` by default.
        """
        keep_result = task.get('keep_result')
        if exc_info:
            if exc_info is True:
                exc_info = sys.exc_info()

            # In sync mode errors surface to the caller immediately;
            # bare raise re-raises the exception being handled.
            if self.sync:
                raise

            if log_exc:
                log.exception('Error during processing task %s', task_fmt(task), exc_info=exc_info)

            retry = task.get('retry')
            if retry and retry > 0:
                # retry=True means retry forever; a number is a counter
                # of remaining attempts.
                if retry is not True:
                    task['retry'] -= 1

                retry_delay = task.get('retry_delay', self.default_retry_delay)
                # A falsy delay requeues the task immediately (eta=None).
                eta = retry_delay and (now or time()) + retry_delay
                self.queue.push(task['queue'], task, eta=eta)
                return

            # Retries exhausted: optionally move to the dead-letter queue
            # with retry bookkeeping stripped.
            if task.get('dead'):
                task.pop('retry', None)
                task.pop('retry_delay', None)
                self.queue.push(task['dead'], task)

            if keep_result:
                result = {'error': exc_info[0].__name__,
                          'message': '{}'.format(exc_info[1]),
                          'trace': ''.join(traceback.format_exception(*exc_info))}
                self.result.set(task['id'], result, keep_result)
        else:
            if keep_result:
                self.result.set(task['id'], {'result': result}, keep_result)
            log.info('Done %s', task_fmt(task))
324 |
325 |
class CrontabCollector(object):
    """Collects crontab-style schedule entries registered via decorator."""

    def __init__(self):
        self.entries = []

    def checker(self):
        # Materialize a Crontab from the registered entries and return a
        # closure firing every action due at a given timestamp.
        crontab = Crontab()
        for entry in self.entries:
            crontab.add(*entry)

        def check(ts):
            for action in crontab.actions_ts(ts):
                action()

        return check

    def __call__(self, minute=-1, hour=-1, day=-1, month=-1, wday=-1):
        def decorator(func):
            if isinstance(func, Task):
                # Scheduling a Task means pushing it to its queue; note the
                # decorator then returns the bound push method.
                func = func.push
            self.entries.append((safe_call(func, log), minute, hour, day, month, wday))
            return func
        return decorator
348 |
349 |
class PeriodicCollector(object):
    """Collects fixed-interval (periodic) entries registered via decorator."""

    def __init__(self):
        self.entries = []

    def timer(self, now):
        # Seed a Timer so every entry first fires at `now` and then every
        # `interval` seconds.
        events = Timer()
        for action, interval in self.entries:
            events.add(action, now, interval)
        return events

    def __call__(self, interval):
        def decorator(func):
            if isinstance(func, Task):
                # A Task is scheduled through its push method; the decorator
                # then returns that bound method.
                func = func.push
            self.entries.append((safe_call(func, log), interval))
            return func
        return decorator
367 |
--------------------------------------------------------------------------------
/dsq/sched.py:
--------------------------------------------------------------------------------
1 | from heapq import heappush, heappop
2 | from datetime import datetime
3 |
4 | from .compat import range
5 |
6 |
class Event(object):
    """A recurring timer event ordered by its next run time."""

    def __init__(self, at, interval, action):
        # point = (next_run_ts, interval); a tuple so events order first
        # by time and then by interval.
        self.point = (at, interval)
        self.action = action

    def __lt__(self, other):
        return self.point < other.point

    def shift(self):
        """Advance to the next occurrence and return self."""
        next_run, interval = self.point
        self.point = (next_run + interval, interval)
        return self
19 |
20 |
class Timer(object):
    """Priority queue of recurring events.

    Iteration endlessly yields ``(next_run_ts, action)`` pairs in
    chronological order, rescheduling each event as it is produced.
    An empty timer yields nothing.
    """
    def __init__(self):
        self.intervals = []

    def add(self, action, at, interval):
        heappush(self.intervals, Event(at, interval, action))

    def __iter__(self):
        if not self.intervals:
            return

        while True:
            event = heappop(self.intervals)
            due = event.point[0]
            # Re-insert the shifted event before yielding so the heap stays
            # consistent even if the consumer stops here.
            heappush(self.intervals, event.shift())
            yield due, event.action
37 |
38 |
def get_points(desc, min, max):
    """Expand a crontab field description into its concrete points.

    :param desc: Either an explicit collection of points (returned as-is),
        a non-negative single point, or a negative step: ``-n`` means
        "every n-th point" within ``[min, max]`` (crontab's ``*/n``).
    :param min: Inclusive lower bound used for stepped descriptions.
        (Parameter names shadow builtins but are kept for backward
        compatibility with keyword callers.)
    :param max: Inclusive upper bound used for stepped descriptions.
    """
    if isinstance(desc, (list, tuple, set)):
        return desc
    if desc < 0:
        # Redundant comprehension replaced with a direct list(range(...)).
        return list(range(min, max + 1, -desc))
    return [desc]
46 |
47 |
def update_set(s, action, points):
    """Register *action* under every point in *points* within mapping *s*."""
    for point in points:
        if point not in s:
            s[point] = set()
        s[point].add(action)
51 |
52 |
class Crontab(object):
    """Crontab-like schedule mapping time-field points to sets of actions."""

    def __init__(self):
        self.minutes = {}
        self.hours = {}
        self.days = {}
        self.months = {}
        self.wdays = {}

    def add(self, action, minute=-1, hour=-1, day=-1, month=-1, wday=-1):
        """Register *action* for the given crontab field descriptions."""
        update_set(self.minutes, action, get_points(minute, 0, 59))
        update_set(self.hours, action, get_points(hour, 0, 23))
        update_set(self.days, action, get_points(day, 1, 31))
        update_set(self.months, action, get_points(month, 1, 12))
        # Crontab treats both 0 and 7 as Sunday; normalize 0 -> 7 to match
        # datetime.isoweekday().
        update_set(self.wdays, action, [p or 7 for p in get_points(wday, 1, 7)])

    def actions(self, minute, hour, day, month, wday):
        """Return actions matching ALL five time fields (set intersection)."""
        nothing = set()
        matched = self.minutes.get(minute, nothing)
        matched = matched & self.hours.get(hour, nothing)
        matched = matched & self.days.get(day, nothing)
        matched = matched & self.months.get(month, nothing)
        return matched & self.wdays.get(wday, nothing)

    def actions_ts(self, ts):
        """Return actions due at unix timestamp *ts* (local time)."""
        dt = datetime.fromtimestamp(ts)
        return self.actions(dt.minute, dt.hour, dt.day, dt.month, dt.isoweekday())
79 |
--------------------------------------------------------------------------------
/dsq/store.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | from msgpack import dumps, loads
4 |
5 | from .utils import iter_chunks
6 | from .compat import iteritems, PY2, string_types
7 |
8 | SCHEDULE_KEY = 'schedule'
9 |
# Helpers mapping queue names to/from redis keys. Queue lists live under
# "queue:<name>" keys, and schedule-zset members are packed as
# "<queue>:<packed task>". Two variants exist because redis-py returns
# str on py2 and bytes on py3.
if PY2: # pragma: no cover
    def qname(name):
        # Extract the queue name from a "queue:<name>" redis key.
        return name.rpartition(':')[2]

    def sitem(queue, task):
        # Build a schedule zset member: "<queue>:<packed-task>".
        return '{}:{}'.format(queue, task)

    def rqname(name):
        # Redis list key for a queue.
        return 'queue:{}'.format(name)
else: # pragma: no cover
    def qname(name):
        # Extract the queue name from a b"queue:<name>" redis key.
        return name.rpartition(b':')[2].decode('utf-8')

    def sitem(queue, task):
        # Build a schedule zset member: b"<queue>:<packed-task>".
        return queue.encode('utf-8') + b':' + task

    def rqname(name):
        # Redis list key for a queue; accepts text or bytes queue names.
        if isinstance(name, string_types):
            name = name.encode('utf-8')
        return b'queue:' + name
30 |
31 |
class QueueStore(object):
    """Queue store

    Persists tasks in redis: immediate tasks in per-queue lists
    ("queue:<name>"), delayed tasks in a single schedule zset scored by
    their eta timestamp. Tasks are msgpack-encoded.
    """
    def __init__(self, client):
        self.client = client

    def push(self, queue, task, eta=None):
        """Append *task* to *queue*, or schedule it for *eta* if given."""
        # Colons would break the "<queue>:<task>" schedule-member encoding.
        assert ':' not in queue, 'Queue name must not contain colon: "{}"'.format(queue)
        body = dumps(task, use_bin_type=True) # TODO: may be better to move task packing to manager
        if eta:
            self.client.zadd(SCHEDULE_KEY, {sitem(queue, body): eta})
        else:
            self.client.rpush(rqname(queue), body)

    def pop(self, queue_list, timeout=None, now=None):
        """Blocking-pop the next task from the first non-empty queue.

        Returns (queue_name, task) or (None, None) on timeout.
        """
        if timeout is None: # pragma: no cover
            # redis blpop semantics: 0 blocks forever.
            timeout = 0

        item = self.client.blpop([rqname(r) for r in queue_list],
                                 timeout=timeout)
        if not item:
            return None, None

        return qname(item[0]), loads(item[1], encoding='utf-8')

    def reschedule(self, now=None):
        """Move all schedule entries due by *now* into their queues.

        Returns the number of entries still left in the schedule.
        """
        now = now or time()
        # Atomically read-and-remove due items, and count the remainder.
        items, _, size = (self.client.pipeline()
                          .zrangebyscore(SCHEDULE_KEY, '-inf', now)
                          .zremrangebyscore(SCHEDULE_KEY, '-inf', now)
                          .zcard(SCHEDULE_KEY)
                          .execute())

        # Chunked pipelines keep memory and round-trips bounded for large
        # backlogs.
        for chunk in iter_chunks(items, 5000):
            pipe = self.client.pipeline(False)
            for r in chunk:
                queue, _, task = r.partition(b':')
                pipe.rpush(rqname(queue), task)
            pipe.execute()

        return size

    def take_many(self, count):
        """Atomically extract up to *count* items from the schedule and
        from every queue (e.g. for migration/backup via put_many)."""
        queues = self.queue_list()

        pipe = self.client.pipeline()
        pipe.zrange(SCHEDULE_KEY, 0, count - 1, withscores=True)
        for q in queues:
            pipe.lrange(rqname(q), 0, count - 1)

        # Same pipeline removes what was just read.
        pipe.zremrangebyrank(SCHEDULE_KEY, 0, count - 1)
        for q in queues:
            pipe.ltrim(rqname(q), count, -1)

        cmds = pipe.execute()
        qresult = {}
        result = {'schedule': cmds[0], 'queues': qresult}
        for q, r in zip(queues, cmds[1:]):
            if r:
                qresult[q] = r

        return result

    def put_many(self, batch):
        """Re-insert a batch previously produced by take_many."""
        pipe = self.client.pipeline(False)

        if batch['schedule']:
            pipe.zadd(SCHEDULE_KEY, dict(batch['schedule']))

        for q, items in iteritems(batch['queues']):
            if items:
                pipe.rpush(rqname(q), *items)

        pipe.execute()

    def queue_list(self):
        """Return the names of all existing queues."""
        return [qname(r) for r in self.client.keys(rqname('*'))]

    def stat(self):
        """Return {'schedule': zset size, <queue>: list length, ...}."""
        pipe = self.client.pipeline(False)
        pipe.zcard(SCHEDULE_KEY)
        queues = self.queue_list()
        for q in queues:
            pipe.llen(rqname(q))
        result = pipe.execute()
        return dict(zip(['schedule'] + queues, result))

    def get_queue(self, queue, offset=0, limit=100):
        """Return up to *limit* decoded tasks from *queue* without popping."""
        items = self.client.lrange(rqname(queue), offset, offset + limit - 1)
        return [loads(r, encoding='utf-8') for r in items]

    def get_schedule(self, offset=0, limit=100):
        """Return [(eta_ts, queue_name, task), ...] schedule entries."""
        items = [(ts, r.partition(b':'))
                 for r, ts in self.client.zrange(SCHEDULE_KEY, offset,
                                                 offset + limit - 1, withscores=True)]
        return [(ts, q if PY2 else q.decode('utf-8'), loads(r, encoding='utf-8'))
                for ts, (q, _, r) in items]
128 |
129 |
class ResultStore(object):
    """Stores task results in redis as msgpack blobs with a TTL."""

    def __init__(self, client):
        self.client = client

    def set(self, id, value, ttl):
        """Store *value* under *id*, expiring after *ttl* seconds."""
        packed = dumps(value, use_bin_type=True)
        self.client.set(id, packed, ttl)

    def get(self, id):
        """Return the stored value for *id*, or None if missing/expired."""
        raw = self.client.get(id)
        if raw is None:
            return None
        return loads(raw, encoding='utf-8')
142 |
--------------------------------------------------------------------------------
/dsq/utils.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import signal
3 | from uuid import uuid4
4 | from base64 import urlsafe_b64encode
5 | from itertools import islice
6 | from functools import wraps
7 |
8 | from redis import StrictRedis
9 |
10 |
def make_id():
    """Make uniq short id"""
    # 16 random bytes -> 24 urlsafe base64 chars; stripping the '=='
    # padding leaves a 22-byte identifier.
    raw = urlsafe_b64encode(uuid4().bytes)
    return raw.rstrip(b'=')
14 |
15 |
def task_fmt(task):
    """Render a task dict as ``name(arg1, kw=val)#id`` for log messages.

    Missing fields fall back to ``__no_name__`` / ``__no_id__``.
    """
    parts = ['{}'.format(a) for a in task.get('args', ())]
    parts += ['{}={}'.format(k, v) for k, v in task.get('kwargs', {}).items()]
    return '{}({})#{}'.format(task.get('name', '__no_name__'),
                              ', '.join(parts),
                              task.get('id', '__no_id__'))
22 |
23 |
def iter_chunks(seq, chunk_size):
    """Yield consecutive lists of at most *chunk_size* items from *seq*."""
    it = iter(seq)
    # Two-argument iter() keeps calling the lambda until it returns the
    # empty-list sentinel, i.e. until the source iterator is exhausted.
    for chunk in iter(lambda: list(islice(it, chunk_size)), []):
        yield chunk
33 |
34 |
class RunFlag(object): # pragma: no cover
    """Boolean flag that flips to False on SIGINT/SIGTERM.

    Lets worker loops shut down gracefully: ``while run: ...``.
    """
    def __init__(self):
        self._flag = True
        signal.signal(signal.SIGINT, self.handler)
        signal.signal(signal.SIGTERM, self.handler)

    def __bool__(self):
        # py3 truthiness.
        return self._flag

    # py2 truthiness: same behavior under the old protocol name.
    __nonzero__ = __bool__

    def stop(self):
        self._flag = False

    def handler(self, signum, frame):
        self.stop()
52 |
53 |
def redis_client(url): # pragma: no cover
    """Create a StrictRedis client from *url*.

    *url* may be a full ``redis://`` URL or a bare ``host[:port]``; an
    empty value falls back to the default localhost connection.
    """
    if not url:
        return StrictRedis()
    if not url.startswith('redis://'):
        url = 'redis://' + url
    return StrictRedis.from_url(url)
61 |
62 |
class LoadError(Exception):
    """Raised when a module does not expose the requested variable."""

    def __init__(self, var, module):
        # Keep both names so callers can build a helpful error message.
        self.var = var
        self.module = module
67 |
68 |
def load_var(module_name, default_var):
    """Loads variable from a module

    :param module_name: module.name or module.name:var
    :param default_var: default var name
    :raises ImportError: if module can't be imported
    :raises LoadError: if module has no var
    """
    mod_path, _, attr = module_name.partition(':')
    attr = attr or default_var

    __import__(mod_path)
    value = getattr(sys.modules[mod_path], attr, None)
    if not value:
        raise LoadError(attr, mod_path)
    return value
88 |
89 |
def load_manager(module_name): # pragma: no cover
    """Load a Manager from *module_name*, exiting the process on failure."""
    try:
        return load_var(module_name, 'manager')
    except LoadError as e:
        # CLI-friendly failure: print the problem and exit non-zero.
        print('{} not found in {}'.format(e.var, e.module))
        sys.exit(1)
96 |
97 |
def safe_call(func, logger):
    """Wrap *func* so any exception is logged instead of propagated.

    The wrapper returns None when *func* raises.
    """
    @wraps(func)
    def guarded(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            # Swallow and log: scheduler callbacks must never kill the loop.
            logger.exception('Error in safe call:')
    return guarded
106 |
--------------------------------------------------------------------------------
/dsq/worker.py:
--------------------------------------------------------------------------------
1 | import traceback
2 | import logging
3 | import signal
4 | import random
5 | from time import time
6 |
7 | from .utils import RunFlag, task_fmt
8 |
9 | log = logging.getLogger(__name__)
10 |
11 |
class StopWorker(Exception):
    """Raised to abort the current worker loop (e.g. on task timeout)."""
14 |
15 |
class Worker(object):
    """Consumes and processes tasks from the manager's queues.

    :param manager: :py:class:`~.manager.Manager` used to pop/process tasks.
    :param lifetime: Approximate seconds this worker should run before
                     exiting (so an external supervisor can restart it).
    :param task_timeout: Default per-task timeout in seconds, enforced via
                         SIGALRM; a task's own 'timeout' field wins.
    """
    def __init__(self, manager, lifetime=None, task_timeout=None):
        self.manager = manager
        # Spread lifetime by up to +10% so workers started together do not
        # all restart at the same moment.
        self.lifetime = lifetime and random.randint(lifetime, lifetime + lifetime // 10)
        self.task_timeout = task_timeout
        # Remembered so the alarm handler can log which task timed out.
        self.current_task = None

    def process_one(self, task):
        # Arm the alarm only when a timeout applies; alarm(0) disarms it
        # once the task finishes in time.
        timeout = task.get('timeout', self.task_timeout)
        if timeout: signal.alarm(timeout)

        self.current_task = task
        self.manager.process(task)

        if timeout: signal.alarm(0)

    def alarm_handler(self, signum, frame): # pragma: no cover
        # SIGALRM fired: the current task overran its timeout. Log the
        # stack at the interruption point, then abort the worker loop.
        trace = ''.join(traceback.format_stack(frame))
        log.error(
            'Timeout during processing task {}\n %s'.format(
                task_fmt(self.current_task)),
            trace)
        raise StopWorker()

    def process(self, queue_list, burst=False): # pragma: no cover
        """Main worker loop: pop tasks from *queue_list* and process them.

        :param queue_list: Queue names in priority order.
        :param burst: Exit once the queues are empty instead of waiting.
        """
        signal.signal(signal.SIGALRM, self.alarm_handler)

        run = RunFlag()  # flips to False on SIGINT/SIGTERM
        start = time()
        while run:
            task = self.manager.pop(queue_list, 1)
            if task:
                try:
                    self.process_one(task)
                except StopWorker:
                    break
            elif burst:
                break

            if self.lifetime and time() - start > self.lifetime:
                break

        self.manager.close()
59 |
--------------------------------------------------------------------------------
/dsq/wsgi.py:
--------------------------------------------------------------------------------
import os
from .utils import load_manager
from .http import Application

# WSGI entry point. The DSQ_TASKS env var names the module (optionally
# "module:var") exposing the Manager instance, e.g. DSQ_TASKS=myapp.tasks.
application = Application(load_manager(os.environ.get('DSQ_TASKS')))
6 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | click>=5.0
2 | msgpack-python==0.4.7
3 | redis>=3
4 | WebOb>=1.5.0
5 | pytest-cov>=2.2.0
6 |
--------------------------------------------------------------------------------
/run.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# Development entry point; the installed `dsq` console script (see
# setup.py entry_points) runs the same click CLI.
from dsq import cli
cli.cli()
4 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 | import dsq
3 |
# Package metadata; the version is read from dsq.version so there is a
# single source of truth.
setup(
    name='dsq',
    version=dsq.version,
    url='https://github.com/baverman/dsq/',
    license='MIT',
    author='Anton Bobrov',
    author_email='baverman@gmail.com',
    description='Dead simple task queue using redis',
    long_description=open('README.rst').read(),
    packages=find_packages(exclude=['tests']),
    include_package_data=True,
    zip_safe=False,
    platforms='any',
    install_requires=[
        'redis >= 3',
        'click >= 5.0.0',
        'msgpack-python>=0.4.0',
    ],
    entry_points={
        'console_scripts': ['dsq = dsq.cli:cli']
    },
    classifiers=[
        'Development Status :: 4 - Beta',
        # 'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX',
        'Operating System :: MacOS',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Internet',
        'Topic :: Scientific/Engineering',
        'Topic :: System :: Distributed Computing',
        'Topic :: System :: Systems Administration',
        'Topic :: System :: Monitoring',
    ]
)
46 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/baverman/dsq/d275a8820b3128dc05afc75ea78b9db61618bd60/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
import logging

# Make library log output visible while running the test suite.
logging.basicConfig()
4 |
--------------------------------------------------------------------------------
/tests/test_http.py:
--------------------------------------------------------------------------------
1 | import redis
2 | import msgpack
3 | import json
4 | import pytest
5 | from webob import Request
6 |
7 | from dsq.store import QueueStore, ResultStore
8 | from dsq.manager import Manager
9 | from dsq.http import Application
10 | from dsq.compat import bytestr
11 |
12 |
# Fixture: a fresh HTTP application backed by a flushed local redis.
@pytest.fixture
def app(request):
    cl = redis.StrictRedis()
    cl.flushdb()
    return Application(Manager(QueueStore(cl), ResultStore(cl)))


# Unknown paths return a JSON 404 body by default.
def test_json_404(app):
    res = Request.blank('/not-found').get_response(app)
    assert res.status_code == 404
    assert res.json == {'message': 'Not found', 'error': 'not-found'}


# The Accept header switches the 404 body to msgpack.
def test_msgpack_404(app):
    res = Request.blank('/not-found', headers={'Accept': 'application/x-msgpack'}).get_response(app)
    assert res.status_code == 404
    assert msgpack.loads(res.body, encoding='utf-8') == {'message': 'Not found', 'error': 'not-found'}


# /push rejects bodies with an unsupported content type.
def test_invalid_content_type(app):
    req = Request.blank('/push')
    req.method = 'POST'
    req.body = b'garbage'
    res = req.get_response(app)
    assert res.status_code == 400
    assert res.json == {'message': 'Content must be json or msgpack',
                        'error': 'invalid-content-type'}

# Malformed JSON payloads produce a 400 with a decode error.
def test_json_invalid_payload(app):
    req = Request.blank('/push')
    req.method = 'POST'
    req.content_type = 'application/json'
    req.body = b'"dddd'
    res = req.get_response(app)
    assert res.status_code == 400
    assert res.json == {'message': 'Can\'t decode body', 'error': 'invalid-encoding'}


# Malformed msgpack payloads produce the same decode error.
def test_msgpack_invalid_payload(app):
    req = Request.blank('/push')
    req.method = 'POST'
    req.content_type = 'application/x-msgpack'
    req.body = b'"dddd'
    res = req.get_response(app)
    assert res.status_code == 400
    assert res.json == {'message': 'Can\'t decode body', 'error': 'invalid-encoding'}


# A valid JSON push lands the task in the named queue.
def test_json_push(app):
    req = Request.blank('/push')
    req.method = 'POST'
    req.content_type = 'application/json'
    req.body = bytestr(json.dumps({'queue': 'normal', 'name': 'boo', 'args': [1, 2, 3]}))
    res = req.get_response(app)
    assert res.status_code == 200
    assert app.manager.queue.get_queue('normal')


# A valid msgpack push lands the task in the named queue.
def test_msgpack_push(app):
    req = Request.blank('/push')
    req.method = 'POST'
    req.content_type = 'application/x-msgpack'
    req.body = msgpack.dumps({'queue': 'normal', 'name': 'boo', 'args': [1, 2, 3]})
    res = req.get_response(app)
    assert app.manager.queue.get_queue('normal')


# Pushing without a queue is a bad-params 400.
def test_task_without_queue(app):
    req = Request.blank('/push')
    req.method = 'POST'
    req.content_type = 'application/json'
    req.body = bytestr(json.dumps({'name': 'boo', 'args': [1, 2, 3]}))
    res = req.get_response(app)
    assert res.status_code == 400
    assert res.json == {'message': 'queue required', 'error': 'bad-params'}


# Pushing without a task name is a bad-params 400.
def test_task_without_name(app):
    req = Request.blank('/push')
    req.method = 'POST'
    req.content_type = 'application/json'
    req.body = bytestr(json.dumps({'queue': 'boo'}))
    res = req.get_response(app)
    assert res.status_code == 400
    assert res.json == {'message': 'name required', 'error': 'bad-params'}


# /result returns null before processing and the stored value afterwards.
def test_result_get(app):
    @app.manager.task
    def add(a, b):
        return a + b

    req = Request.blank('/push')
    req.method = 'POST'
    req.content_type = 'application/json'
    req.body = bytestr(json.dumps({'queue': 'boo', 'name': 'add',
                                   'args': (1, 2), 'keep_result': 100}))
    res = req.get_response(app)
    tid = res.json['id']
    assert Request.blank('/result?id={}'.format(tid)).get_response(app).json == None
    app.manager.process(app.manager.pop(['boo'], 1))
    assert Request.blank('/result?id={}'.format(tid)).get_response(app).json == {'result': 3}


# /result without an id is a bad-params 400.
def test_get_without_id(app):
    res = Request.blank('/result').get_response(app)
    assert res.status_code == 400
    assert res.json == {'message': 'id required', 'error': 'bad-params'}
121 |
--------------------------------------------------------------------------------
/tests/test_manager.py:
--------------------------------------------------------------------------------
1 | import time
2 | import signal
3 |
4 | import pytest
5 | import redis
6 | import msgpack
7 |
8 | from dsq.store import QueueStore, ResultStore
9 | from dsq.manager import Manager, make_task
10 | from dsq.worker import Worker, StopWorker
11 |
12 |
13 | @pytest.fixture
14 | def manager(request):
15 | cl = redis.StrictRedis()
16 | cl.flushdb()
17 | return Manager(QueueStore(cl), ResultStore(cl))
18 |
19 |
20 | def task_names(tasks):
21 | return [msgpack.loads(r)['name'] for r in tasks]
22 |
23 |
24 | def test_expired_task(manager):
25 | called = []
26 |
27 | @manager.task
28 | def foo():
29 | called.append(True)
30 |
31 | manager.process(make_task('foo', expire=10), now=15)
32 | assert not called
33 |
34 | manager.process(make_task('foo', expire=10), now=5)
35 | assert called
36 |
37 |
38 | def test_unknown_task(manager):
39 | manager.process(make_task('foo'))
40 | assert manager.pop(['unknown'], 1)['name'] == 'foo'
41 |
42 |
43 | def test_worker_alarm(manager):
44 | called = []
45 | def handler(signal, frame):
46 | called.append(True)
47 | signal.signal(signal.SIGALRM, handler)
48 |
49 | @manager.task
50 | def foo(sleep):
51 | time.sleep(sleep)
52 |
53 | w = Worker(manager, task_timeout=1)
54 | w.process_one(make_task('foo', args=(0.1,)))
55 | assert not called
56 |
57 | w.process_one(make_task('foo', args=(1.1,)))
58 | assert called
59 |
60 |
61 | def test_retry_task(manager):
62 | @manager.task
63 | def foo():
64 | raise Exception()
65 |
66 | manager.default_retry_delay = None
67 | t = make_task('foo', retry=True)
68 | t['queue'] = 'test'
69 | manager.process(t)
70 | assert manager.pop(['test'], 1)['name'] == 'foo'
71 |
72 | t['retry_delay'] = 10
73 | manager.process(t, now=20)
74 | assert not manager.pop(['test'], 1)
75 | manager.queue.reschedule(50)
76 | assert manager.pop(['test'], 1)['name'] == 'foo'
77 |
78 | t['retry_delay'] = None
79 | t['retry'] = 1
80 | manager.process(t, now=20)
81 | assert manager.pop(['test'], 1)['retry'] == 0
82 |
83 |
84 | def test_dead_task(manager):
85 | @manager.task
86 | def foo():
87 | raise Exception()
88 |
89 | manager.process(make_task('foo', dead='dead'))
90 | assert manager.pop(['dead'], 1)['name'] == 'foo'
91 |
92 |
93 | def test_task_calling(manager):
94 | @manager.task(queue='test')
95 | def foo(bar, boo):
96 | assert bar == 'bar'
97 | assert boo == 'boo'
98 | foo.called = True
99 |
100 | foo.push('bar', boo='boo')
101 | task = manager.pop(['test'], 1)
102 | manager.process(task)
103 | assert foo.called
104 |
105 |
106 | def test_string_types(manager):
107 | @manager.task(queue='test')
108 | def foo(bstr, ustr):
109 | assert type(bstr) == type(b'')
110 | assert type(ustr) == type(u'')
111 |
112 | foo.push(b'boo', u'boo')
113 | task = manager.pop(['test'], 1)
114 | manager.process(task)
115 |
116 |
117 | def test_task_modification(manager):
118 | @manager.task
119 | def foo():
120 | pass
121 |
122 | foo.modify(queue='bar', ttl=10, dead='dead').push()
123 | task = manager.pop(['bar'], 1)
124 | assert task['queue'] == 'bar'
125 | assert task['expire']
126 | assert task['dead'] == 'dead'
127 |
128 |
129 | def test_task_sync(manager):
130 | @manager.task
131 | def foo(a, b):
132 | return a + b
133 |
134 | assert foo(1, 2) == 3
135 | assert foo.modify(queue='normal')(1, 2) == 3
136 |
137 |
138 | def test_sync_manager(manager):
139 | manager.sync = True
140 |
141 | @manager.task
142 | def foo(a, b):
143 | foo.called = True
144 | return a + b
145 |
146 | assert foo.push(1, 2).ready().value == 3
147 | assert foo.called
148 |
149 | with pytest.raises(KeyError):
150 | manager.process(make_task('boo'))
151 |
152 | @manager.task
153 | def bad():
154 | raise ZeroDivisionError()
155 |
156 | with pytest.raises(ZeroDivisionError):
157 | bad.push()
158 |
159 |
160 | def test_task_with_context(manager):
161 | manager.sync = True
162 | @manager.task(with_context=True)
163 | def foo(ctx, a, b):
164 | foo.called = True
165 | assert ctx.manager is manager
166 | assert ctx.task['name'] == 'foo'
167 | assert ctx
168 | assert a + b == 3
169 |
170 | foo.push(1, 2)
171 | assert foo.called
172 |
173 |
174 | def test_delayed_task(manager):
175 | now = time.time()
176 | manager.push('test', 'foo', delay=10)
177 | (ts, q, t), = manager.queue.get_schedule()
178 | assert now + 9 < ts < now + 11
179 | assert q == 'test'
180 | assert t['name'] == 'foo'
181 |
182 |
def test_manager_must_pass_stop_worker_exc(manager):
    """StopWorker raised inside a task must escape process() untouched."""
    @manager.task
    def alarm():
        raise StopWorker()

    with pytest.raises(StopWorker):
        manager.process(make_task('alarm'))
190 |
191 |
def test_get_result(manager):
    """With keep_result the push handle becomes ready only after processing."""
    @manager.task
    def task():
        return 'result'

    handle = manager.push('normal', 'task', keep_result=10)
    # Not ready before the worker processes it, even with a polling timeout.
    assert not handle.ready()
    assert not handle.ready(0.1, 0.05)
    manager.process(manager.pop(['normal'], 1))
    assert handle.ready().value == 'result'
202 |
203 |
def test_result_exception(manager):
    """A failing task yields error/error_message/error_trace and no value."""
    @manager.task(queue='normal', keep_result=10)
    def task():
        1/0

    handle = task.push()
    manager.process(manager.pop(['normal'], 1))
    assert handle.ready()
    assert handle.error == 'ZeroDivisionError'
    assert handle.error_message
    assert handle.error_trace
    # A failed task must not expose a success value at all.
    assert not hasattr(handle, 'value')
216 |
217 |
def test_tasks_should_have_non_none_fields(manager):
    """A popped task contains only the fields that were actually set."""
    manager.push('boo', 'foo')
    popped = manager.pop(['boo'], 1)
    assert popped['id']
    assert popped['name'] == 'foo'
    assert popped['queue'] == 'boo'
    assert set(popped) == {'id', 'name', 'queue'}
225 |
226 |
def test_stateful_tasks(manager):
    """init_state gives a task persistent state shared across invocations."""
    @manager.task(queue='normal', init_state=lambda: [0])
    def task(ctx):
        ctx.state[0] += 1
        ctx.set_result(None)

    # Three runs must accumulate into the same state object.
    for _ in range(3):
        manager.process(make_task('task'))
    assert manager.states['task'] == [3]
237 |
238 |
def test_stateful_tasks_close(manager):
    """manager.close() must call close() on every initialized task state."""
    class State(object):
        def close(self):
            self.closed = True

    @manager.task(queue='normal', init_state=State)
    def task(ctx):
        pass

    # Processing once forces the state to be created.
    manager.process(make_task('task'))
    manager.close()
    assert manager.states['task'].closed
251 |
252 |
def test_crontab_collector(manager):
    """crontab() registers both dsq tasks and plain callables.

    The checker pushes task-wrapped entries onto their queue and invokes
    plain callables directly; `bar` raising after setting its flag shows
    the checker apparently does not propagate such errors — confirmed by
    check(1) returning normally.
    """
    @manager.crontab()
    @manager.task(queue='normal')
    def boo():
        pass

    @manager.crontab()
    def bar():
        bar.called = True
        1/0

    check = manager.crontab.checker()
    check(1)
    manager.process(manager.pop(['normal'], 1))
    # Fixed: was `assert bar.called == True` (PEP 8 E712); truthiness
    # assertion is the idiomatic form and tests the same condition.
    assert bar.called
268 |
269 |
def test_periodic_collector(manager):
    """periodic(interval) schedules both dsq tasks and plain callables.

    The timer yields (tick, action) pairs; firing the first two actions
    covers one task-wrapped entry (pushed to 'normal') and one plain
    callable (invoked directly, its exception not propagating).
    """
    @manager.periodic(1)
    @manager.task(queue='normal')
    def boo():
        pass

    @manager.periodic(1)
    def bar():
        bar.called = True
        1/0

    timer = manager.periodic.timer(0)
    titer = iter(timer)
    # Fire the first two scheduled actions, one per registered callable.
    _, action = next(titer)
    action()
    _, action = next(titer)
    action()

    manager.process(manager.pop(['normal'], 1))
    # Fixed: was `assert bar.called == True` (PEP 8 E712); use truthiness.
    assert bar.called
290 |
--------------------------------------------------------------------------------
/tests/test_queue_store.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest
3 | import redis
4 | import msgpack
5 |
6 | from dsq.store import QueueStore
7 |
8 |
@pytest.fixture
def store(request):
    """Provide a QueueStore backed by a freshly flushed local Redis."""
    client = redis.StrictRedis()
    client.flushdb()
    return QueueStore(client)
14 |
15 |
def test_push_pop(store):
    """pop returns (None, None) when empty and (queue, task) otherwise."""
    assert store.pop(['test'], 1) == (None, None)
    store.push('test', 't1')
    assert store.pop(['test'], 1) == ('test', 't1')
21 |
22 |
def test_reschedule(store):
    """reschedule moves tasks with eta <= now from the schedule to queues."""
    store.push('test', 't1', eta=500)
    store.reschedule(now=490)  # before eta: task stays scheduled
    assert store.pop(['test'], 1) == (None, None)
    store.reschedule(now=510)  # past eta: task becomes poppable
    assert store.pop(['test'], 1)[1] == 't1'
29 |
30 |
def test_stat(store):
    """stat reports per-queue counts plus pending scheduled entries."""
    store.push('boo', 't1', eta=500)  # goes to the schedule, not the queue
    store.push('boo', 't2')
    store.push('boo', 't3')
    assert store.stat() == {'schedule': 1, 'boo': 2}
36 |
37 |
def task_names(tasks):
    """Decode each raw msgpack queue payload back into a task body."""
    return [msgpack.loads(raw) for raw in tasks]
40 |
41 |
def stask_names(tasks):
    """Decode schedule entries, dropping the b'queue:' prefix before unpacking."""
    return [msgpack.loads(entry[0].partition(b':')[2]) for entry in tasks]
44 |
45 |
def test_take_and_put(store):
    # take_many(n) must drain up to n entries from the schedule and up to n
    # items per queue; put_many must restore a taken snapshot verbatim.
    store.push('boo', 'boo1')
    store.push('boo', 'boo2')
    store.push('boo', 'boo3')
    store.push('foo', 'foo1')
    store.push('foo', 'foo2')

    # eta pushes land in the schedule structure, not the plain queues.
    store.push('boo', 'boo4', eta=10)
    store.push('boo', 'boo5', eta=15)
    store.push('foo', 'foo3', eta=20)
    assert set(store.queue_list()) == set(('boo', 'foo'))

    # First chunk: the two earliest schedule entries and two items per queue.
    result = store.take_many(2)
    assert stask_names(result['schedule']) == [b'boo4', b'boo5']
    assert task_names(result['queues']['boo']) == [b'boo1', b'boo2']
    assert task_names(result['queues']['foo']) == [b'foo1', b'foo2']

    # Second chunk drains the remainder; fully emptied queues are omitted.
    result = store.take_many(10)
    assert stask_names(result['schedule']) == [b'foo3']
    assert task_names(result['queues']['boo']) == [b'boo3']
    assert 'foo' not in result['queues']

    # Restoring the second chunk re-creates queue 'boo' and the schedule entry.
    store.put_many(result)
    assert set(store.queue_list()) == set(('boo',))

    assert store.get_schedule() == [(20, 'foo', 'foo3')]
    assert store.get_queue('boo') == ['boo3']
    assert store.get_queue('foo') == []
74 |
--------------------------------------------------------------------------------
/tests/test_result_store.py:
--------------------------------------------------------------------------------
1 | import time
2 | import pytest
3 | import redis
4 |
5 | from dsq.store import ResultStore
6 |
7 |
@pytest.fixture
def store(request):
    """Provide a ResultStore backed by a freshly flushed local Redis."""
    client = redis.StrictRedis()
    client.flushdb()
    return ResultStore(client)
13 |
14 |
def test_set(store):
    """set stores a retrievable value and applies the requested TTL."""
    store.set('id', 10, 20)
    assert store.get('id') == 10
    assert store.client.ttl('id') == 20
19 |
20 |
def test_empty_get(store):
    """get must return None for a key that was never set."""
    # Fixed: was `== None` (PEP 8 E711); identity comparison is the idiom
    # for None and avoids surprises from custom __eq__ implementations.
    assert store.get('not-exists') is None
23 |
--------------------------------------------------------------------------------
/tests/test_sched.py:
--------------------------------------------------------------------------------
1 | from itertools import islice
2 | from dsq.sched import Timer, Crontab
3 |
4 | from datetime import datetime
5 | from time import mktime
6 |
7 |
def test_interval_timer():
    """Timer yields (tick, name) events in chronological order per interval."""
    timer = Timer()
    # An empty timer produces no events at all.
    assert not list(timer)

    timer.add('foo', 10, 10)
    timer.add('boo', 20, 20)
    timer.add('bar', 30, 30)

    expected = [(10, 'foo'), (20, 'foo'), (20, 'boo'), (30, 'foo'),
                (30, 'bar'), (40, 'foo'), (40, 'boo'), (50, 'foo'),
                (60, 'foo'), (60, 'boo'), (60, 'bar')]
    assert list(islice(timer, 11)) == expected
20 |
21 |
def test_crontab():
    """Crontab matches entries by (minute, hour, day, month, weekday) specs,
    including list specs and negative values (as accepted by Crontab.add)."""
    tab = Crontab()
    tab.add('boo')
    tab.add('foo', 0)
    tab.add('bar', [1, 3], -5, -1, -1, 0)

    assert tab.actions(0, 1, 1, 1, 1) == {'boo', 'foo'}
    assert tab.actions(1, 1, 1, 1, 1) == {'boo'}
    assert tab.actions(1, 5, 1, 1, 7) == {'boo', 'bar'}
    assert tab.actions(3, 5, 1, 1, 7) == {'boo', 'bar'}

    # actions_ts must agree with actions for an equivalent wall-clock time.
    stamp = mktime(datetime(2016, 1, 17, 5, 1).timetuple())
    assert tab.actions_ts(stamp) == {'boo', 'bar'}
35 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from dsq.utils import load_var, LoadError, task_fmt
3 |
4 |
def test_load_var():
    """load_var resolves 'module' (with a default attr) and 'module:attr'."""
    class mod:
        boo = 10
        foo = 20
    # Register a fake module so load_var can import it by name.
    sys.modules['fake_module'] = mod
    assert load_var('fake_module', 'boo') == 10
    assert load_var('fake_module:foo', 'boo') == 20
12 |
13 |
def test_load_error():
    """A missing attribute must raise LoadError with module/var populated.

    Fixed: the original try/except silently passed if load_var raised
    nothing at all; the else branch now fails the test in that case.
    """
    try:
        load_var('sys:not_exist', 'boo')
    except LoadError as e:
        assert e.module == 'sys'
        assert e.var == 'not_exist'
    else:
        raise AssertionError('load_var did not raise LoadError')
20 |
21 |
def test_task_fmt():
    """task_fmt renders 'name(args, kwargs)#id' with placeholder fallbacks."""
    assert task_fmt({}) == '__no_name__()#__no_id__'
    assert task_fmt({'name': 'boo', 'id': 'foo'}) == 'boo()#foo'

    rendered = task_fmt({'name': 'boo', 'id': 'foo', 'args': (1, [2]),
                         'kwargs': {'bar': {'baz': "10"}}})
    assert rendered == "boo(1, [2], bar={'baz': '10'})#foo"
29 |
--------------------------------------------------------------------------------