is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " watch to watch for changes and build html dynamically"
28 | @echo " dirhtml to make HTML files named index.html in directories"
29 | @echo " singlehtml to make a single large HTML file"
30 | @echo " pickle to make pickle files"
31 | @echo " json to make JSON files"
32 | @echo " htmlhelp to make HTML files and an HTML help project"
33 | @echo " qthelp to make HTML files and a qthelp project"
34 | @echo " devhelp to make HTML files and a Devhelp project"
35 | @echo " epub to make an epub"
36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
37 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
39 | @echo " text to make text files"
40 | @echo " man to make manual pages"
41 | @echo " texinfo to make Texinfo files"
42 | @echo " info to make Texinfo files and run them through makeinfo"
43 | @echo " gettext to make PO message catalogs"
44 | @echo " changes to make an overview of all changed/added/deprecated items"
45 | @echo " xml to make Docutils-native XML files"
46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
47 | @echo " linkcheck to check all external links for integrity"
48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
49 | @echo " ipython to generate all example html files in ipython notebook format"
50 |
51 | clean:
52 | rm -rf $(BUILDDIR)/*
53 |
54 | html:
55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
56 | @echo
57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
58 |
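59 | # watch requires fswatch to be installed (e.g. `brew install fswatch` on OSX)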
59 | watch:
60 | fswatch -e _build -o ./ | xargs -n1 -I{} make html
61 |
62 | dirhtml:
63 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
64 | @echo
65 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
66 |
67 | singlehtml:
68 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
69 | @echo
70 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
71 |
72 | pickle:
73 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
74 | @echo
75 | @echo "Build finished; now you can process the pickle files."
76 |
77 | json:
78 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
79 | @echo
80 | @echo "Build finished; now you can process the JSON files."
81 |
82 | htmlhelp:
83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
84 | @echo
85 | @echo "Build finished; now you can run HTML Help Workshop with the" \
86 | ".hhp project file in $(BUILDDIR)/htmlhelp."
87 |
88 | qthelp:
89 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
90 | @echo
91 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
92 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
93 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pypackage.qhcp"
94 | @echo "To view the help file:"
95 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pypackage.qhc"
96 |
97 | devhelp:
98 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
99 | @echo
100 | @echo "Build finished."
101 | @echo "To view the help file:"
102 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pypackage"
103 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pypackage"
104 | @echo "# devhelp"
105 |
106 | epub:
107 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
108 | @echo
109 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
110 |
111 | latex:
112 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
113 | @echo
114 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
115 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
116 | "(use \`make latexpdf' here to do that automatically)."
117 |
118 | latexpdf:
119 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
120 | @echo "Running LaTeX files through pdflatex..."
121 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
122 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
123 |
124 | latexpdfja:
125 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
126 | @echo "Running LaTeX files through platex and dvipdfmx..."
127 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
128 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
129 |
130 | text:
131 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
132 | @echo
133 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
134 |
135 | man:
136 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
137 | @echo
138 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
139 |
140 | texinfo:
141 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
142 | @echo
143 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
144 | @echo "Run \`make' in that directory to run these through makeinfo" \
145 | "(use \`make info' here to do that automatically)."
146 |
147 | info:
148 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
149 | @echo "Running Texinfo files through makeinfo..."
150 | $(MAKE) -C $(BUILDDIR)/texinfo info
151 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
152 |
153 | gettext:
154 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
155 | @echo
156 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
157 |
158 | changes:
159 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
160 | @echo
161 | @echo "The overview file is in $(BUILDDIR)/changes."
162 |
163 | linkcheck:
164 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
165 | @echo
166 | @echo "Link check complete; look for any errors in the above output " \
167 | "or in $(BUILDDIR)/linkcheck/output.txt."
168 |
169 | doctest:
170 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
171 | @echo "Testing of doctests in the sources finished, look at the " \
172 | "results in $(BUILDDIR)/doctest/output.txt."
173 |
174 | xml:
175 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
176 | @echo
177 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
178 |
179 | pseudoxml:
180 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
181 | @echo
182 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
183 |
184 |
--------------------------------------------------------------------------------
/docs/_static/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bprinty/Flask-Execute/9406914a7996e73034db94f15b79f70e246d7084/docs/_static/.gitkeep
--------------------------------------------------------------------------------
/docs/_static/default.css:
--------------------------------------------------------------------------------
1 | @import url("flasky.css");
2 | div.warning, div.attention{
3 | background-color: #ffedcc;
4 | }
5 | div.danger {
6 | background-color: #fdf3f2;
7 | }
8 | div.info, div.note {
9 | background-color: #e7f2fa;
10 | }
11 | div.tip, div.important {
12 | background-color: #dbfaf4;
13 | }
14 | div.alert {
15 | background-color: #ffedcc;
16 | }
17 | div.admonition{
18 | border: none;
19 | }
20 | div.admonition p.admonition-title{
21 | font-variant: small-caps;
22 | }
23 | p.admonition-title:after{
24 | content: "";
25 | }
--------------------------------------------------------------------------------
/docs/_static/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bprinty/Flask-Execute/9406914a7996e73034db94f15b79f70e246d7084/docs/_static/logo.png
--------------------------------------------------------------------------------
/docs/_static/side-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bprinty/Flask-Execute/9406914a7996e73034db94f15b79f70e246d7084/docs/_static/side-icon.png
--------------------------------------------------------------------------------
/docs/_templates/sidebarintro.html:
--------------------------------------------------------------------------------
1 | About
2 |
3 | Flask-Execute is a package that simplifies the process of configuring Celery to run alongside a Flask application.
4 |
5 | Useful Links
6 |
11 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 |
2 | API
3 | ===
4 |
5 |
6 | Plugin
7 | ------
8 |
9 | .. autoclass:: flask_execute.Celery
10 | :members:
11 |
12 | .. autofunction:: flask_execute.plugin.dispatch
13 |
14 |
15 | Futures
16 | -------
17 |
18 | .. autoclass:: flask_execute.futures.Future
19 | :members:
20 |
21 | .. autoclass:: flask_execute.futures.FuturePool
22 | :members:
23 |
24 |
25 | Managers
26 | --------
27 |
28 | .. autoclass:: flask_execute.managers.TaskManager
29 | :members:
30 |
31 |
32 | .. autoclass:: flask_execute.managers.CommandManager
33 | :members:
34 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # documentation build configuration file, created by
5 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013.
6 | #
7 | # This file is execfile()d with the current directory set to its
8 | # containing dir.
9 | #
10 | # Note that not all possible configuration values are present in this
11 | # autogenerated file.
12 | #
13 | # All configuration values have a default; values that are commented out
14 | # serve to show the default.
15 |
16 | import sys
17 | import os
18 |
19 | # Add any Sphinx extension module names here, as strings. They can be
20 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
21 | # ones.
22 | extensions = [
23 | 'sphinx.ext.imgmath',
24 | 'sphinx.ext.autodoc', 'sphinx.ext.napoleon'  # napoleon required for the settings below
25 | ]
26 |
27 | # Napoleon settings
28 | napoleon_google_docstring = True
29 | napoleon_numpy_docstring = False
30 | napoleon_include_private_with_doc = False
31 | napoleon_include_special_with_doc = True
32 | napoleon_use_admonition_for_examples = False
33 | napoleon_use_admonition_for_notes = False
34 | napoleon_use_admonition_for_references = False
35 | napoleon_use_ivar = False
36 | napoleon_use_param = True
37 | napoleon_use_rtype = True
38 |
39 | # Get the project root dir, which is the parent dir of this one.
40 | cwd = os.getcwd()
41 | project_root = os.path.dirname(cwd)
42 |
43 | # Insert the project root dir as the first element in the PYTHONPATH.
44 | # This lets us ensure that the source is imported, and that its
45 | # version is used.
46 | sys.path.insert(0, project_root)
47 |
48 | from flask_execute import __author__, __pkg__, __version__, __info__
49 |
50 | # -- General configuration ---------------------------------------------
51 |
52 | # If your documentation needs a minimal Sphinx version, state it here.
53 | #needs_sphinx = '1.0'
54 |
55 | # Add any paths that contain templates here, relative to this directory.
56 | templates_path = ['_templates']
57 |
58 | # The suffix of source filenames.
59 | source_suffix = '.rst'
60 |
61 | # The encoding of source files.
62 | #source_encoding = 'utf-8-sig'
63 |
64 | # The master toctree document.
65 | master_doc = 'index'
66 |
67 | # General information about the project.
68 | project = __pkg__
69 | copyright = u'2016, {}'.format(__author__)
70 |
71 | # The version info for the project you're documenting, acts as replacement
72 | # for |version| and |release|, also used in various other places throughout
73 | # the built documents.
74 | #
75 | # The short X.Y version.
76 | version = __version__
77 | # The full version, including alpha/beta/rc tags.
78 | release = __version__
79 |
80 | # The language for content autogenerated by Sphinx. Refer to documentation
81 | # for a list of supported languages.
82 | #language = None
83 |
84 | # There are two options for replacing |today|: either, you set today to
85 | # some non-false value, then it is used:
86 | #today = ''
87 | # Else, today_fmt is used as the format for a strftime call.
88 | #today_fmt = '%B %d, %Y'
89 |
90 | # List of patterns, relative to source directory, that match files and
91 | # directories to ignore when looking for source files.
92 | exclude_patterns = ['_build']
93 |
94 | # The reST default role (used for this markup: `text`) to use for all
95 | # documents.
96 | #default_role = None
97 |
98 | # If true, '()' will be appended to :func: etc. cross-reference text.
99 | #add_function_parentheses = True
100 |
101 | # If true, the current module name will be prepended to all description
102 | # unit titles (such as .. function::).
103 | #add_module_names = True
104 |
105 | # If true, sectionauthor and moduleauthor directives will be shown in the
106 | # output. They are ignored by default.
107 | #show_authors = False
108 |
109 | # The name of the Pygments (syntax highlighting) style to use.
110 | pygments_style = 'sphinx'
111 |
112 | # A list of ignored prefixes for module index sorting.
113 | #modindex_common_prefix = []
114 |
115 | # If true, keep warnings as "system message" paragraphs in the built
116 | # documents.
117 | #keep_warnings = False
118 |
119 |
120 | # -- Options for HTML output -------------------------------------------
121 |
122 | # The theme to use for HTML and HTML Help pages. See the documentation for
123 | # a list of builtin themes.
124 | html_theme = 'flask'
125 |
126 | # Theme options are theme-specific and customize the look and feel of a
127 | # theme further. For a list of options available for each theme, see the
128 | # documentation.
129 | html_theme_options = {"index_logo": "logo.png"}
130 |
131 | # Add any paths that contain custom themes here, relative to this directory.
132 | html_theme_path = []
133 |
134 | # The name for this set of Sphinx documents. If None, it defaults to
135 | # " v documentation".
136 | #html_title = None
137 |
138 | # A shorter title for the navigation bar. Default is the same as
139 | # html_title.
140 | #html_short_title = None
141 |
142 | # The name of an image file (relative to this directory) to place at the
143 | # top of the sidebar.
144 | html_logo = "_static/side-icon.png"
145 | html_favicon = "_static/side-icon.png"
146 |
147 | # The name of an image file (within the static path) to use as favicon
148 | # of the docs. This file should be a Windows icon file (.ico) being
149 | # 16x16 or 32x32 pixels large.
150 | #html_favicon = None
151 |
152 | # Add any paths that contain custom static files (such as style sheets)
153 | # here, relative to this directory. They are copied after the builtin
154 | # static files, so a file named "default.css" will overwrite the builtin
155 | # "default.css".
156 | html_static_path = ['_static']
157 |
158 | # If not '', a 'Last updated on:' timestamp is inserted at every page
159 | # bottom, using the given strftime format.
160 | #html_last_updated_fmt = '%b %d, %Y'
161 |
162 | # If true, SmartyPants will be used to convert quotes and dashes to
163 | # typographically correct entities.
164 | #html_use_smartypants = True
165 |
166 | # Custom sidebar templates, maps document names to template names.
167 | html_sidebars = {
168 | 'index': ['sidebarintro.html', 'localtoc.html', 'sourcelink.html', 'searchbox.html'],
169 | '**': ['localtoc.html', 'relations.html',
170 | 'sourcelink.html', 'searchbox.html']
171 | }
172 |
173 | # Additional templates that should be rendered to pages, maps page names
174 | # to template names.
175 | #html_additional_pages = {}
176 |
177 | # If false, no module index is generated.
178 | #html_domain_indices = True
179 |
180 | # If false, no index is generated.
181 | #html_use_index = True
182 |
183 | # If true, the index is split into individual pages for each letter.
184 | #html_split_index = False
185 |
186 | # If true, links to the reST sources are added to the pages.
187 | #html_show_sourcelink = True
188 |
189 | # If true, "Created using Sphinx" is shown in the HTML footer.
190 | # Default is True.
191 | #html_show_sphinx = True
192 |
193 | # If true, "(C) Copyright ..." is shown in the HTML footer.
194 | # Default is True.
195 | #html_show_copyright = True
196 |
197 | # If true, an OpenSearch description file will be output, and all pages
198 | # will contain a <link> tag referring to it. The value of this option
199 | # must be the base URL from which the finished HTML is served.
200 | #html_use_opensearch = ''
201 |
202 | # This is the file name suffix for HTML files (e.g. ".xhtml").
203 | #html_file_suffix = None
204 |
205 | # Output file base name for HTML help builder.
206 | htmlhelp_basename = 'htmlhelpdoc'
207 |
208 |
209 | # -- Options for LaTeX output ------------------------------------------
210 |
211 | latex_elements = {
212 | # The paper size ('letterpaper' or 'a4paper').
213 | #'papersize': 'letterpaper',
214 |
215 | # The font size ('10pt', '11pt' or '12pt').
216 | #'pointsize': '10pt',
217 |
218 | # Additional stuff for the LaTeX preamble.
219 | #'preamble': '',
220 | }
221 |
222 | # Grouping the document tree into LaTeX files. List of tuples
223 | # (source start file, target name, title, author, documentclass
224 | # [howto/manual]).
225 | latex_documents = [
226 | ('index', '{}.tex'.format(__pkg__),
227 | u'{} Documentation'.format(__pkg__),
228 | __author__, 'manual'),
229 | ]
230 |
231 | # The name of an image file (relative to this directory) to place at
232 | # the top of the title page.
233 | #latex_logo = None
234 |
235 | # For "manual" documents, if this is true, then toplevel headings
236 | # are parts, not chapters.
237 | #latex_use_parts = False
238 |
239 | # If true, show page references after internal links.
240 | #latex_show_pagerefs = False
241 |
242 | # If true, show URL addresses after external links.
243 | #latex_show_urls = False
244 |
245 | # Documents to append as an appendix to all manuals.
246 | #latex_appendices = []
247 |
248 | # If false, no module index is generated.
249 | #latex_domain_indices = True
250 |
251 |
252 | # -- Options for manual page output ------------------------------------
253 |
254 | # One entry per manual page. List of tuples
255 | # (source start file, name, description, authors, manual section).
256 | man_pages = [
257 | ('index', __pkg__,
258 | u'{} Documentation'.format(__pkg__),
259 | [__author__], 1)
260 | ]
261 |
262 | # If true, show URL addresses after external links.
263 | #man_show_urls = False
264 |
265 |
266 | # -- Options for Texinfo output ----------------------------------------
267 |
268 | # Grouping the document tree into Texinfo files. List of tuples
269 | # (source start file, target name, title, author,
270 | # dir menu entry, description, category)
271 | texinfo_documents = [
272 | ('index', __pkg__,
273 | u'{} Documentation'.format(__pkg__),
274 | __author__,
275 | __pkg__,
276 | __info__,
277 | 'Miscellaneous'),
278 | ]
279 |
280 | # Documents to append as an appendix to all manuals.
281 | #texinfo_appendices = []
282 |
283 | # If false, no module index is generated.
284 | #texinfo_domain_indices = True
285 |
286 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
287 | #texinfo_show_urls = 'footnote'
288 |
289 | # If true, do not generate a @detailmenu in the "Top" node's menu.
290 | #texinfo_no_detailmenu = False
291 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | =============
2 | Flask-Execute
3 | =============
4 |
5 |
6 | .. include:: overview.rst
7 |
8 |
9 |
10 | User Guide
11 | ==========
12 |
13 | .. toctree::
14 | :maxdepth: 3
15 |
16 | overview
17 | install
18 | usage
19 | api
20 |
--------------------------------------------------------------------------------
/docs/install.rst:
--------------------------------------------------------------------------------
1 |
2 | Installation
3 | ============
4 |
5 |
6 | To install the latest stable release via pip, run:
7 |
8 | .. code-block:: bash
9 |
10 | $ pip install Flask-Execute
11 |
12 |
13 | Alternatively with easy_install, run:
14 |
15 | .. code-block:: bash
16 |
17 | $ easy_install Flask-Execute
18 |
19 |
20 | To install the bleeding-edge version of the project:
21 |
22 | .. code-block:: bash
23 |
24 | $ git clone http://github.com/bprinty/Flask-Execute.git
25 | $ cd Flask-Execute
26 | $ python setup.py install
27 |
--------------------------------------------------------------------------------
/docs/overview.rst:
--------------------------------------------------------------------------------
1 |
2 | Overview
3 | ========
4 |
5 | Flask-Execute is a plugin for simplifying the configuration and management of Celery alongside a Flask application. It also slightly changes the paradigm for registering and dispatching celery tasks, exposing an API similar to the ``concurrent.futures`` API for submitting tasks to a separate executor.
6 |
7 | Other features of the plugin include:
8 |
9 | * Automatic spin-up of local workers, queues, schedulers, and monitoring tools via configuration.
10 | * Automatic application context wrapping for celery workers.
11 | * Simpler API for submitting tasks to workers that doesn't require pre-registration of tasks.
12 | * Result object API similar to ``concurrent.futures.Future`` API.
13 | * Flask CLI wrapper around the ``celery`` command that automatically wraps celery commands with an application context.
14 |
15 | The Flask `documentation <https://flask.palletsprojects.com/en/latest/patterns/celery/>`_ details how to configure Celery with Flask without this plugin, and readers are encouraged to check out that documentation before working with this plugin.
16 |
17 | .. Other alternatives to consider when choosing an execution engine for your app are:
18 | ..
19 | .. * `Flask-Dask `_
20 | .. * `Flask-Executor `_
21 |
22 |
23 | A Minimal Application
24 | ---------------------
25 |
26 | To set up an application with the extension, you can register the application directly:
27 |
28 | .. code-block:: python
29 |
30 | from flask import Flask
31 | from flask_execute import Celery
32 |
33 | app = Flask(__name__)
34 | plugin = Celery(app)
35 |
36 |
37 | Or, via factory pattern:
38 |
39 | .. code-block:: python
40 |
41 | celery = Celery()
42 | app = Flask(__name__)
43 | celery.init_app(app)
44 |
45 |
46 | Once the plugin has been registered, you can submit a task using:
47 |
48 | .. code-block:: python
49 |
50 | def add(x, y):
51 | return x + y
52 |
53 | future = celery.submit(add, 1, 2)
54 |
55 | # wait for result (not required)
56 | future.result(timeout=1)
57 |
58 | # cancel result
59 | future.cancel()
60 |
61 | # add callback function
62 | def callback():
63 | # do something ...
64 | return
65 |
66 | future.add_done_callback(callback)
67 |
68 |
69 | Below is a full example application utilizing this package for a simple ``add`` task:
70 |
71 | .. code-block:: python
72 |
73 | from flask import Flask, jsonify
74 | from flask_execute import Celery
75 |
76 |
77 | app = Flask(__name__)
78 | celery = Celery()
79 | celery.init_app(app)
80 | future = None # placeholder
81 |
82 |
83 | def add(x, y):
84 | return x + y
85 |
86 |
87 | @app.route('/status')
88 | def status():
89 | """
90 | Check status of celery task.
91 | """
92 | global future
93 | if future is None:
94 | return jsonify(msg='WAITING')
95 | else:
96 | return jsonify(msg=future.status)
97 |
98 |
99 | @app.route('/submit')
100 | def submit_add():
101 | """
102 | Submit add task and return.
103 | """
104 | global future
105 | future = celery.submit(add, 1, 1)
106 | return jsonify(msg='Submitted add task')
107 |
108 |
109 | @app.route('/result')
110 | def result_add():
111 | """
112 | Get result of submitted celery task.
113 | """
114 | global future
115 |
116 | if future is None:
117 | return jsonify(msg='Task must be submitted via /submit.'), 400
118 | else:
119 | result = future.result(timeout=1)
120 | future = None
121 | return jsonify(result=result)
122 |
123 |
124 | if __name__ == '__main__':
125 | app.run()
126 |
127 |
128 | Note that this plugin does not require users to pre-register tasks via the ``@celery.task`` decorator. This enables developers to more easily control whether or not task execution happens within the current session or on a separate worker. It also makes the API similar to the API provided by `Dask <https://docs.dask.org>`_ and `concurrent.futures <https://docs.python.org/3/library/concurrent.futures.html>`_. Also note that the ``celery`` command-line tool for spinning up local workers is no longer necessary. If no workers are connected, this plugin will automatically spin them up the first time a ``celery.submit()`` call is made.
129 |
130 | Once a task has been submitted, you can monitor the state via:
131 |
132 | .. code-block:: python
133 |
134 | task_id = future.id
135 |
136 | # later in code
137 |
138 | future = celery.get(task_id)
139 | print(future.state)
140 |
141 |
142 | You can also manage state updates within tasks with a more Flask-y syntax:
143 |
144 | .. code-block:: python
145 |
146 | from flask_execute import current_task
147 |
148 | def add(a, b):
149 | current_task.update_state(state='PROGRESS')
150 | return a + b
151 |
152 |
153 | This plugin will also manage the process of spinning up local workers bound to your application the first time a ``celery.submit()`` call is made (if configured to do so). Additionally, the plugin will automatically wrap ``celery`` cli calls with your flask application context (whether or not it uses the factory pattern), so you can more easily interact with celery:
154 |
155 | .. code-block:: bash
156 |
157 | # start local celery cluster with workers, flower monitor, and celerybeat scheduler
158 | ~$ flask celery cluster
159 |
160 | # start local worker
161 | ~$ flask celery worker
162 |
163 | # check status of running workers
164 | ~$ flask celery status
165 |
166 | # shutdown all celery workers
167 | ~$ flask celery control shutdown
171 |
172 |
173 | If your application uses the factory pattern with a ``create_app`` function for registering blueprints and plugins, you can use the standard ``flask cli`` syntax for automatically wrapping ``celery`` commands with your application context:
174 |
175 | .. code-block:: bash
176 |
177 | # check status of running workers
178 | ~$ FLASK_APP=app:create_app flask celery status
179 |
180 |
181 | For more in-depth discussion on design considerations and how to fully utilize the plugin, see the `User Guide <./usage.html>`_.
182 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | Flask-Sphinx-Themes>=1.0.2
3 | celery
4 |
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 |
2 | .. _Celery documentation: https://docs.celeryproject.org/en/latest/userguide/
3 | .. _Celery Setup documentation: https://docs.celeryproject.org/en/latest/getting-started/first-steps-with-celery.html
4 | .. _Celery Result documentation: https://docs.celeryproject.org/en/latest/reference/celery.result.html
5 | .. _Celery Task documentation: https://docs.celeryproject.org/en/latest/userguide/tasks.html
6 | .. _Celery Worker documentation: https://docs.celeryproject.org/en/latest/userguide/workers.html
7 | .. _Celery Periodic Tasks documentation: https://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html
8 | .. _Celery Config documentation: https://docs.celeryproject.org/en/latest/userguide/configuration.html
9 |
10 |
11 | Usage
12 | =====
13 |
14 | The sections below detail how to fully use this module, along with context for design decisions made during development of the plugin.
15 |
16 |
17 | Why is this Necessary?
18 | ----------------------
19 |
20 | If you've configured Flask to use Celery before, you may have run into the motivating factor behind the creation of this package - it's not particularly straightforward to 1) connect celery workers to a flask instance, 2) wrap celery workers in a flask application context, 3) use the application factory pattern alongside a celery configuration, or 4) manage starting workers in development mode. Like other Flask extensions, configuration for an external tool should be as simple as instantiating the extension and registering the Flask application:
21 |
22 | .. code-block:: python
23 |
24 | app = Flask()
25 | celery = Celery(app)
26 |
27 |
28 | This package is functionally a wrapper around the process of configuring celery that resolves the annoyances listed above and adds the following additional functionality:
29 |
30 | 1. Removes the need to manually start local celery workers and configure celery ``Tasks`` with separate application contexts.
31 | 2. Provides simpler worker and queue configuration (related to 1).
32 | 3. Provides ``flask`` command-line extensions for configuring celery with the application context.
33 | 4. Homogenizes the API for interacting with tasks with other execution tools like ``concurrent.futures`` and ``Dask``.
34 | 5. Allows developers to dynamically submit tasks to Celery, instead of needing to pre-register tasks to run on workers.
35 |
36 | The features listed above simplify the process of configuring Celery to work with Flask and make working with Celery a more enjoyable experience. If you don't agree with those sentiments or like the way Celery historically has been configured with Flask applications, feel free to ignore the rest of this documentation. This extension isn't necessary for configuring your application to use celery, just like ``Flask-SQLAlchemy`` isn't necessary for configuring your application to use ``SQLAlchemy``.
37 |
38 |
39 | Prerequisites
40 | -------------
41 |
42 | Just like celery, this package requires a message broker as a prerequisite. For information on how to install and set up the various celery message brokers, see the `Celery Setup documentation`_.
43 |
44 | For those who just want to get moving quickly, here's how to install ``Redis`` on OSX:
45 |
46 | .. code-block:: bash
47 |
48 | ~$ brew install redis
49 |
50 |
51 | And on ``*nix`` systems:
52 |
53 | .. code-block:: bash
54 |
55 | ~$ wget http://download.redis.io/redis-stable.tar.gz
56 | ~$ tar xvzf redis-stable.tar.gz
57 | ~$ cd redis-stable
58 | ~$ make
59 |
60 | To start redis manually (most installers will configure it to start on boot), run:
61 |
62 | .. code-block:: bash
63 |
64 | ~$ redis-server
65 |
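66 | Once the broker is running, point the plugin at it via your application config. A minimal sketch, using the default redis URL noted in the `Configuration`_ section below:
67 |
68 | .. code-block:: python
69 |
70 |     class Config:
71 |         CELERY_BROKER_URL = 'redis://localhost:6379'
72 |
73 |     app.config.from_object(Config)  # before calling celery.init_app(app)
74 |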
66 |
67 | Setup
68 | -----
69 |
70 | As mentioned in the overview section of the documentation, to configure your application to use Celery via this extension you can register it directly:
71 |
72 | .. code-block:: python
73 |
74 | from flask import Flask
75 | from flask_execute import Celery
76 |
77 | app = Flask(__name__)
78 | plugin = Celery(app)
79 |
80 |
81 | Or, via the application factory pattern:
82 |
83 | .. code-block:: python
84 |
85 | celery = Celery()
86 | app = Flask(__name__)
87 | celery.init_app(app)
88 |
89 |
90 | That's it! All of the other tedium around wrapping tasks in an application context, creating a ``make_celery`` function, or pre-registering tasks is no longer necessary. Additionally, you don't need to manually use the ``celery`` CLI tool to start workers if your workers are meant to run on the same server the application runs on. This package will automatically spin them up the first time an executable is sent to the workers. More fine-grained control over worker configuration, along with the command-line extensions this tool provides, is detailed later in the documentation.
91 |
92 | Once this extension has been registered with the application, you can submit tasks to workers via ``celery.submit()``:
93 |
94 | .. code-block:: python
95 |
96 | def add(x, y):
97 | return x + y
98 |
99 | celery.submit(add, 1, 2)
100 |
101 | More information on task execution and other tools the ``Celery`` object provides is detailed below.
102 |
103 |
104 | Tasks
105 | -----
106 |
107 | Submitting Tasks to Workers
108 | +++++++++++++++++++++++++++
109 |
110 | There are a couple of divergences this extension introduces from the historical Flask/Celery setup. First, developers aren't required to pre-register tasks to submit them to celery workers. With this extension, you just need to call ``celery.submit`` to send an arbitrary function (with arguments) to a worker for external execution:
111 |
112 | .. code-block:: python
113 |
114 | def add(x, y):
115 | return x + y
116 |
117 | celery.submit(add, 1, 2)
118 | celery.submit(add, 1, y=2)
119 | celery.submit(add, x=1, y=2)
120 |
121 |
122 | ``celery.submit`` returns a ``Future`` object that can be used to query the status of the task:
123 |
124 | .. code-block:: python
125 |
126 | >>> future = celery.submit(add, 1, 2)
127 | >>> future.running()
128 | True
129 | >>> future.done()
130 | False
131 | >>> future.result(timeout=1) # wait for result
132 | 3
133 |
134 |
135 | Just like with other executor tools, this extension also provides a built-in ``map`` operator for submitting an iterable object to remote workers:
136 |
137 | .. code-block:: python
138 |
139 | # arguments
140 | >>> future_pool = celery.map(add, [1, 2], [3, 4], [5, 6])
141 | >>> for future in future_pool:
142 | ...     print(future.result(timeout=1))
143 | 3
144 | 7
145 | 11
146 |
147 | # with constant keyword arguments
148 | >>> future_pool = celery.map(add, [1], [3], [5], y=2)
149 | >>> for future in future_pool:
150 | ...     print(future.result(timeout=1))
151 | 3
152 | 5
153 | 7
154 |
155 |
156 | The return value for the ``celery.map()`` function is a ``FuturePool`` object that can serve as a proxy for querying the overall status of the submitted tasks. All API methods on the ``Future`` object are also available on the ``FuturePool`` object:
157 |
158 | .. code-block:: python
159 |
160 | >>> pool = celery.map(add, [1, 2], [3, 4], [5, 6])
161 |
162 | # check if any tasks in the pool are still running
163 | >>> pool.running()
164 | True
165 |
166 | # check if all tasks in the pool are done
167 | >>> pool.done()
168 | False
169 |
170 | # return a list with the map results
171 | >>> pool.result(timeout=1)
172 | [3, 7, 11]
173 |
174 |
175 | For more information about the methods available on ``Future`` and ``FuturePool`` objects, see the `Working with Futures`_ section of the documentation.
176 |
177 |
178 | Working with Futures
179 | ++++++++++++++++++++
180 |
181 | As alluded to previously in the documentation, the return value for submitting a task is a ``Future`` object, which wraps the ``celery.AsyncResult`` object with an API similar to the ``concurrent.futures`` `Future <https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.Future>`_ API. With this object you can do the following:
182 |
183 | .. code-block:: python
184 |
185 | # submitting future
186 | future = celery.submit(add, 1, 2)
187 |
188 | # cancel task
189 | future.cancel()
190 |
191 | # check if task has been cancelled
192 | future.cancelled() # True
193 |
194 | # check if task is currently running
195 | future.running() # True
196 |
197 | # check if task is finished running
198 | future.done()
199 |
200 | # wait for result (with optional timeout)
201 | future.result(timeout=1)
202 |
203 | # raise exception returned by future
204 | future.exception()
205 |
206 |
207 | You can also query properties of the ``celery.AsyncResult`` object from ``Future`` objects:
208 |
209 | .. code-block:: python
210 |
211 | # query status/state
212 | future.state
213 | future.status
214 |
215 | # query task id
216 | future.id
217 |
218 | # query task name
219 | future.name
220 |
221 |
222 | For more information on available properties, see the `Celery Result documentation`_.
223 |
224 | Finally, you can also add a callback to be executed when the task finishes running.
225 |
226 | .. code-block:: python
227 |
228 | def callback():
229 | # callback function
230 | return
231 |
232 | # submitting future
233 | future = celery.submit(add, 1, 2)
234 |
235 | # adding callback
236 | future.add_done_callback(callback)
237 |
238 |
239 | This will ensure that the specified callback function is automatically executed when the task returns a ``done`` status.
240 |
241 | If you have the task ID (obtained via ``Future.id``), you can query a task Future via:
242 |
243 | .. code-block:: python
244 |
245 | >>> future = celery.submit(add, 1, 2)
246 | >>> task_id = future.id
247 |
248 | # later in code ...
249 |
250 | >>> future = celery.get(task_id)
251 | >>> future.done()
252 | False
253 |
254 |
255 | Similarly to ``Future`` objects, ``FuturePool`` objects are a wrapper around the ``GroupResult`` object available from celery. Accordingly, the ``FuturePool`` object has a very similar API:
256 |
257 | .. code-block:: python
258 |
259 | # submitting future
260 | pool = celery.map(add, [1, 2], [3, 4], [5, 6])
261 |
262 | # cancel *all* tasks in the pool
263 | pool.cancel()
264 |
265 | # check if *any* task in the pool has been cancelled
266 | pool.cancelled() # True
267 |
268 | # check if *any* task in the pool is currently running
269 | pool.running() # True
270 |
271 | # check if *all* tasks in the pool are finished running
272 | pool.done()
273 |
274 | # wait for *all* task results (with optional timeout)
275 | pool.result(timeout=1)
276 |
277 | # raise *any* exception returned by the pool
278 | pool.exception()
279 |
280 |
281 | Task Registration
282 | +++++++++++++++++
283 |
284 | If you like the declarative syntax celery uses to register tasks, you can still do so via:
285 |
286 | .. code-block:: python
287 |
288 | app = Flask(__name__)
289 | celery = Celery(app)
290 |
291 | @celery.task
292 | def add(x, y):
293 | return x + y
294 |
295 | add.delay(1, 2)
296 |
297 | However, using the ``delay`` method on the registered task will only work if the application was not configured using the factory pattern with a ``create_app`` function. If you want to use the celery task API within an app configured using the factory pattern, call the task from the ``celery`` plugin object:
298 |
299 | .. code-block:: python
300 |
301 | celery = Celery()
302 |
303 | @celery.task
304 | def add(x, y):
305 | return x + y
306 |
307 | app = Flask(__name__)
308 | celery.init_app(app)
309 |
310 | celery.task.add.delay(1, 2)
311 |
312 |
313 | Alternatively, if you don't need the celery workers to have tasks registered and are happy with just submitting them dynamically, use the ``celery.submit()`` method detailed above.
314 |
315 | For more information on registering tasks and configuration options available, see the `Celery Task documentation`_.
316 |
317 |
318 | Task Scheduling
319 | +++++++++++++++
320 |
321 | Another useful feature provided by this extension is a declarative mechanism for scheduling tasks. With this extension, developers no longer need to manually add entries to the celery ``beat`` configuration (or even worry about starting a celery ``beat`` service).
322 |
323 | To schedule a periodic task to run alongside the application, use the ``celery.schedule()`` decorator. For instance, to schedule a task to run every night at midnight:
324 |
325 | .. code-block:: python
326 |
327 | @celery.schedule(hour=0, minute=0, name='scheduled-task-to-run-at-midnight')
328 | def scheduled_task():
329 | # do something ...
330 | return
331 |
332 | The arguments to the ``schedule`` decorator can be numeric (an interval in seconds):
333 |
334 | .. code-block:: python
335 |
336 | @celery.schedule(30, args=(1, 2), kwargs=dict(arg3='foo'))
337 | def task_to_run_every_30_seconds(arg1, arg2, arg3='test'):
338 | # do something ...
339 | return
340 |
341 | Or, keyword arguments to the ``celery.schedules.crontab`` function:
342 |
343 | .. code-block:: python
344 |
345 | @celery.schedule(hour=7, minute=30, day_of_week=1)
346 | def task_to_run_every_monday_morning():
347 | # do something ...
348 | return
349 |
350 | Or, a solar schedule:
351 |
352 | .. code-block:: python
353 |
354 | from celery.schedules import solar
355 |
356 | @celery.schedule(solar('sunset', -37.81753, 144.96715), name='solar-task')
357 | def task_to_run_every_sunset():
358 | # do something ...
359 | return
360 |
361 |
362 | In addition, if you don't want to use this decorator, you can still schedule tasks via the ``CELERYBEAT_SCHEDULE`` configuration option. For more information on task scheduling, including ``crontab`` and ``solar`` schedule configuration, see the `Celery Periodic Tasks documentation`_.
363 |
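364 | For reference, a minimal sketch of a raw ``CELERYBEAT_SCHEDULE`` entry, assuming a task registered under the name ``tasks.add``:
365 |
366 | .. code-block:: python
367 |
368 |     class Config:
369 |         CELERYBEAT_SCHEDULE = {
370 |             'add-every-30-seconds': {
371 |                 'task': 'tasks.add',      # assumed registered task name
372 |                 'schedule': 30.0,         # seconds
373 |                 'args': (1, 2),
374 |             },
375 |         }
376 |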
364 |
365 | Status Updates
366 | ++++++++++++++
367 |
368 | Another divergence from the original Celery API is how ``Task`` objects are referenced in code. This extension takes a more Flask-y approach to accessing said information, where a proxied object called ``current_task`` is available for developers to reference throughout their application. This paradigm is similar to the ``current_app`` or ``current_user`` object commonly referenced in flask applications. For example, to reference the current task and update the state metadata:
369 |
370 | .. code-block:: python
371 |
372 | from flask_execute import current_task
373 |
374 | def add(a, b):
375 | current_task.update_state(state='PROGRESS')
376 | return a + b
377 |
378 | More information about the ``update_state`` method or ``Task`` objects can be found in the `Celery Task documentation`_.
379 |
380 | If the function is not currently running in a task, this will raise an error because the proxy object will resolve to ``None``. If the method you're using will run both within and outside celery tasks, you'll want to check if the ``current_task`` proxy is available:
381 |
382 | .. code-block:: python
383 |
384 | def add(x, y):
385 | if current_task:
386 | current_task.update_state(state='PROGRESS')
387 | return x + y
388 |
389 |
390 | Writing Safe Code
391 | +++++++++++++++++
392 |
393 | As with any program that executes code across multiple threads or processes, developers must be cognizant of how IO is managed at the boundaries across separate application contexts (i.e. how data are passed to and returned from functions). In general, try to write thread-safe code when working on functions that might be sent to celery workers. Some recommendations are as follows:
394 |
395 | * Don't pass instantiated SQLAlchemy objects or file streams as arguments to functions. Instead, pass in references (primary keys or other identifiers) to the objects you want to use and query them from within the function before executing other logic (see the sketch after this list).
396 |
397 | * Don't pass lambda functions or other non-pickle-able objects as arguments to functions. For information on which objects can and cannot be pickled, see the `pickle documentation <https://docs.python.org/3/library/pickle.html>`_.
398 |
399 | * Don't reference global variables that might change values when the application is created on an external executor. LocalProxy objects in Flask are safe to reference.
400 |
401 | * Ensure that functions either return or fail with appropriate and manageable exceptions. This allows developers to more easily diagnose failures that occur on external executors.
402 |
403 | * If external libraries are used, import them within the functions that use them.
404 |
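405 | As an example of the first point, a minimal sketch assuming a hypothetical SQLAlchemy ``User`` model:
406 |
407 | .. code-block:: python
408 |
409 |     def send_welcome_email(user_id):
410 |         # re-query the record inside the function instead of passing
411 |         # the instantiated model across the process boundary
412 |         user = User.query.get(user_id)  # hypothetical model
413 |         print('emailing {}'.format(user.email))
414 |
415 |     # pass the primary key, not the model instance
416 |     celery.submit(send_welcome_email, user.id)
417 |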
405 |
406 | If you run into an issue sending data back and forth to executors, feel free to file a question in the GitHub Issue Tracker for this project.
407 |
408 |
409 | Management
410 | ----------
411 |
412 | Starting Celery
413 | +++++++++++++++
414 |
415 | As mentioned in the overview of the documentation, this extension can manage the process of starting celery workers the first time a ``celery.submit()`` call is made. It will also pass all celery configuration (i.e. any option starting with ``CELERY``) specified in your application config to Celery. Accordingly, this means you **do not have to manually start workers, beat schedulers, or flower** if all of your workers are to run locally. With this extension, the first time you run a ``celery.submit()`` call:
416 |
417 | .. code-block:: python
418 |
419 | def add(x, y):
420 | return x + y
421 |
422 | celery.submit(add, 1, 2)
423 |
424 | The following services will be started in the background:
425 |
426 | 1. All workers referenced by the ``CELERY_WORKERS`` config variable. This configuration value can take a number of workers or a list of explicit worker names. This can be disabled using ``CELERY_START_LOCAL_WORKERS=False`` in your application config (recommended for production).
427 |
428 | 2. The `Celery Flower `_ monitoring tool for monitoring celery workers and statuses. This can be disabled using ``CELERY_FLOWER=False`` in your application config (recommended for production).
429 |
430 | 3. If any tasks are registered via ``celery.schedule``, the `Celery Beat `_ scheduler tool for managing scheduled tasks. This can be disabled using ``CELERY_SCHEDULER=False`` in your application config (recommended for production).
431 |
432 | An example **production**, **development**, and **testing** config are shown here:
433 |
434 | .. code-block:: python
435 |
436 | # set worker names, don't start services (started externally)
437 | class ProdConfig:
438 | ENV = 'production'
439 | CELERY_WORKERS = ['foo', 'bar']
440 | CELERY_START_LOCAL_WORKERS = False
441 | CELERY_FLOWER = False
442 | CELERY_SCHEDULER = False
443 |
444 | # start workers, flower, and scheduler on first submit call
445 | class DevConfig:
446 | ENV = 'development'
447 | CELERY_WORKERS = 2
448 |
449 |
450 | # don't start local workers - run in eager mode
451 | class TestConfig:
452 | ENV = 'testing'
453 | CELERY_ALWAYS_EAGER = True
454 |
455 |
456 | Above, the ``ProdConfig`` will tell the plugin to not start local workers, because they should be configured externally via the ``flask celery cluster`` or ``flask celery worker`` command-line tools (more info below).
457 |
458 | The ``DevConfig`` will start local workers, flower, and the scheduler lazily (i.e. whenever the first ``celery.submit()`` call is made). Whenever the application is torn down, all forked services will be terminated.
459 |
460 | The ``TestConfig`` will use the same dispatch tools, but will execute the functions in the main application thread instead of on remote workers (accordingly, workers will not be started on ``celery.submit()``). This is particularly useful during unit testing when running separate workers requires unnecessary overhead.
461 |
462 |
463 | Command-Line Extensions
464 | +++++++++++++++++++++++
465 |
466 | Alternatively, you can still start celery workers manually for your application and reference them via config (recommended for production). Instead of invoking celery directly and specifying the path to the application, you should use either the built-in ``flask celery cluster`` or ``flask celery worker`` CLI methods:
467 |
468 | .. code-block:: bash
469 |
470 | # start all specified workers, flower, and scheduler
471 | ~$ flask celery cluster
472 |
473 | # start single worker
474 | ~$ flask celery worker
475 |
476 | # start single named worker
477 | ~$ flask celery worker -n foo@%h
478 |
479 | # start flower
480 | ~$ flask celery flower
481 |
482 | # start beat scheduler
483 | ~$ flask celery beat
484 |
485 |
486 | Each of these cli extensions wraps ``celery`` calls with the application context (even when using an application factory function). Other cli extensions provided by celery are also available:
487 |
488 | .. code-block:: bash
489 |
490 | # ping workers
491 | ~$ flask celery inspect ping
492 |
493 | # inspect worker stats
494 | ~$ flask celery inspect stats
495 |
496 | # shut down all workers
497 | ~$ flask celery control shutdown
498 |
499 | # get status of all workers
500 | ~$ flask celery status
501 |
502 |
503 | Accordingly, when using the ``flask`` cli entrypoint, you'll need to make sure the application is available as an ``app.py`` file in your local directory, or referenced via the ``FLASK_APP`` environment variable:
504 |
505 | .. code-block:: bash
506 |
507 | # without create app function
508 | ~$ FLASK_APP=my_app flask celery cluster
509 |
510 | # using factory method
511 | ~$ FLASK_APP=my_app:create_app flask celery cluster
512 |
513 |
514 | If you really want to invoke celery directly, you must reference ``flask_execute.celery`` as the celery application. This uses Flask's application auto-detection to find the flask application celery needs to work with:
515 |
516 | .. code-block:: bash
517 |
518 | # start worker with celery
519 | ~$ celery -A flask_execute.celery worker --loglevel=info
520 |
521 | As alluded to above, if you're using a factory pattern (i.e. with a ``create_app`` function) to create the app, you can reference the application factory at the command-line via environment variable (similar to Flask CLI methods):
522 |
523 | .. code-block:: bash
524 |
525 | # recommended
526 | ~$ FLASK_APP="app:create_app" flask celery worker
527 |
528 | # using celery directly
529 | ~$ FLASK_APP="app:create_app" celery -A flask_execute.celery worker --loglevel=info
530 |
531 |
532 | Configuring Workers
533 | +++++++++++++++++++
534 |
535 | As alluded to above, with this extension, you have control (via configuration) over how workers are initialized. For example, to configure your application to use a specific number of workers or specific worker names, use:
536 |
537 | .. code-block:: python
538 |
539 | >>> # number of workers, no name preference
540 | >>> class Config:
541 | >>> CELERY_WORKERS = 2
542 |
543 | >>> # named workers
544 | >>> class Config:
545 | >>> CELERY_WORKERS = ['foo', 'bar']
546 |
547 | >>> app.config.from_object(Config)
548 | >>> celery.init_app(app)
549 | >>> celery.start()
550 | >>> celery.status()
551 | {
552 | "foo@localhost": "OK",
553 | "bar@localhost": "OK"
554 | }
555 |
556 |
557 | For more advanced worker configuration, you can make the config option a dictionary mapping worker names to specific configuration options to be passed to celery when creating workers:
558 |
559 | .. code-block:: python
560 |
561 | class Config:
562 | CELERY_WORKERS = {
563 | 'foo': {
564 | 'concurrency': 10,
565 | 'loglevel': 'error',
566 | 'pidfile': '/var/run/celery/%n.pid',
567 | 'queues': ['low-priority', 'high-priority']
568 | },
569 | 'bar': {
570 | 'concurrency': 5,
571 | 'loglevel': 'info',
572 | 'queues': ['high-priority']
573 | }
574 | }
575 |
576 |
577 | This is equivalent to the following command-line specification:
578 |
579 | .. code-block:: bash
580 |
581 | # foo worker
582 | ~$ flask celery worker -n foo@%h --concurrency=10 --loglevel=error --pidfile=/var/run/celery/%n.pid --queues=low-priority,high-priority
583 |
584 | # bar worker
585 | ~$ flask celery worker -n bar@%h --concurrency=5 --loglevel=info --queues=high-priority
586 |
587 | For more information on the parameters available for configuring celery workers, see the `Celery Worker documentation`_.
588 |
589 |
590 | Queues
591 | ++++++
592 |
593 | As alluded to above, you can configure workers to subscribe to specific queues. This extension will automatically detect queue references in worker configuration, and will set them up for you. With this, there's no need to manually specify ``task_routes``, because tasks within this module can be dynamically sent to specific queues, instead of being pre-registered to always execute on a specific queue.
594 |
595 | For example, to configure your application with two workers that execute from two different queues, use the following configuration:
596 |
597 | .. code-block:: python
598 |
599 | class Config:
600 | CELERY_WORKERS = {
601 | # worker for priority items
602 | 'foo': {
603 | 'queues': ['low-priority', 'high-priority']
604 | },
605 |
606 | # worker for high-priority tasks only
607 | 'bar': {
608 | 'queues': ['high-priority']
609 | },
610 |
611 | # worker for any task
612 | 'baz': {}
613 | }
614 |
615 | Once the queues have been defined for workers, you can submit a task to a specific queue using the following syntax with ``submit()``:
616 |
617 | .. code-block:: python
618 |
619 | # submit to default queue
620 | >>> celery.submit(add, 1, 2)
621 |
622 | # submit to high priority queue
623 | >>> celery.submit(add, 1, 2, queue='high-priority')
624 |
625 | With this syntax, the ``queue`` keyword is reserved on calls to ``submit()``. Accordingly, developers should be careful not to use that argument for functions that can be submitted to an executor.
626 |
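627 | For example, a sketch of the pitfall to avoid, using a hypothetical ``process`` function:
628 |
629 | .. code-block:: python
630 |
631 |     # avoid signatures like this: the `queue` keyword passed to
632 |     # submit() is consumed for routing and won't reach the function
633 |     def process(item, queue='default'):
634 |         return item
635 |
636 |     # routes to the high-priority queue; process() only receives 'foo'
637 |     celery.submit(process, 'foo', queue='high-priority')
638 |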
627 |
628 | Inspection
629 | ----------
630 |
631 | This extension also provides various utilities for inspecting the state of submitted tasks and general stats about workers (see the `Celery Worker documentation`_ for background). These utilities are all available on the extension object once the application has been registered and workers started.
632 |
633 | .. code-block:: python
634 |
635 | # ping workers
636 | >>> celery.inspect.ping()
637 |
638 | # inspect active tasks
639 | >>> celery.inspect.active()
640 |
641 | # inspect scheduled tasks
642 | >>> celery.inspect.scheduled()
643 |
644 | # inspect reserved tasks
645 | >>> celery.inspect.reserved()
646 |
647 | # inspect revoked tasks
648 | >>> celery.inspect.revoked()
649 |
650 | # inspect registered tasks
651 | >>> celery.inspect.registered()
652 |
653 | # inspect worker stats
654 | >>> celery.inspect.stats()
655 |
656 |
657 | Note that all of this inspection information is also available via the ``Flower`` monitoring tool.
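658 | With the default configuration, the dashboard is served at ``http://127.0.0.1:5555`` (see ``CELERY_FLOWER_ADDRESS`` and ``CELERY_FLOWER_PORT`` in the `Configuration`_ section below).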
658 |
659 |
660 | Control
661 | -------
662 |
663 | Similarly to the `Inspection`_ tool, the extension provides a proxy for controlling celery directly (also covered in the `Celery Worker documentation`_).
664 |
665 | .. code-block:: python
666 |
667 | # shutdown all workers
668 | >>> celery.control.shutdown()
669 |
670 | # restart worker pool
671 | >>> celery.control.pool_restart()
672 |
673 | # shrink worker pool by 1
674 | >>> celery.control.pool_shrink(1)
675 |
676 | # expand worker pool by 1
677 | >>> celery.control.pool_grow(1)
678 |
679 | # manage autoscale settings
680 | >>> celery.control.autoscale(1, 5)
681 |
682 |
683 | Configuration
684 | -------------
685 |
686 | The majority of customizations for this plugin happen via configuration, and this section covers the various types of customizations available. Alongside the new configuration options for this plugin, any celery configuration options (prefixed with ``CELERY*``) specified in your application config are passed to the celery application. For a list of available configuration options, see the `Celery Config documentation`_.
687 |
688 |
689 | Plugin Configuration
690 | ++++++++++++++++++++
691 |
692 | New celery configuration keys that specifically change the features of this plugin (not celery) include:
693 |
694 | .. tabularcolumns:: |p{6.5cm}|p{10cm}|
695 |
696 | ================================== =========================================
700 | ``CELERY_BROKER_URL`` The URL to use for the celery broker backend.
701 | Defaults to redis://localhost:6379.
702 |
703 | ``CELERY_WORKERS`` A number of workers, list of worker names, or
704 | dictionary of worker names and nested
705 | configuration options. Defaults to ``1``.
706 |
707 | ``CELERY_SANITIZE_ARGUMENTS`` Whether or not to automatically attempt to evaluate
708 | proxied inputs and re-query database models by
709 | ``id`` property. This is useful if you wish to pass
710 | database Models or proxy objects to functions running
711 | on remote executors. This can be turned off if you're
712 | not passing complex objects via task functions.
713 |
714 | ``CELERY_START_LOCAL_WORKERS`` Whether or not to automatically start workers
715 | locally whenever a ``celery.submit()`` call is
716 | made. Defaults to ``True``, and should be set
717 | to ``False`` in production.
718 |
719 | ``CELERY_START_TIMEOUT`` How long to wait for starting local workers
720 | before timing out and throwing an error. Defaults
721 | to ``10`` seconds and can be increased if many
722 | local workers will be started by this plugin.
723 |
724 | ``CELERY_LOG_LEVEL`` The default log level to use across celery services
725 | started by this application.
726 |
727 | ``CELERY_LOG_DIR`` A directory where all celery logs will be stored.
728 | The default for this option is the current directory
729 | where the application is run.
730 |
731 | ``CELERY_FLOWER`` Whether or not to start the flower monitoring tool
732 | alongside local workers. Default is ``True``, and
733 | the plugin assumes ``flower`` has been installed.
734 | Should be set to ``False`` in production.
735 |
736 | ``CELERY_FLOWER_PORT`` If flower is configured to run locally, the port
737 | it will run on. Default is ``5555``.
738 |
739 | ``CELERY_FLOWER_ADDRESS`` If flower is configured to run locally, the address
740 | flower will run on. Default is ``'127.0.0.1'``.
741 |
742 | ``CELERY_SCHEDULER`` Whether or not to start the celerybeat scheduler tool
743 | alongside local workers. Default is ``True``, and
744 | should be set to ``False`` in production.
745 |
746 | ================================== =========================================
747 |
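As an illustration, the ``CELERY_WORKERS`` option accepts several formats; a minimal sketch of each (the worker names and arguments below are arbitrary examples):

.. code-block:: python

    # number of workers to start
    app.config['CELERY_WORKERS'] = 2

    # explicit worker names
    app.config['CELERY_WORKERS'] = ['worker1', 'worker2']

    # worker names mapped to extra worker arguments
    app.config['CELERY_WORKERS'] = {
        'worker1': {'loglevel': 'info'},
        'worker2': {'loglevel': 'debug'},
    }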
748 |
749 | Default Overrides
750 | +++++++++++++++++
751 |
752 | Existing celery configuration options that have been overridden by this plugin to accommodate various plugin features include:
753 |
754 | ================================== =========================================
755 | ``CELERY_RESULT_SERIALIZER`` The celery result serialization format. To enable
756 | dynamic submission of celery tasks, this plugin
757 | has set the option to ``'pickle'``.
758 |
759 | ``CELERY_ACCEPT_CONTENT`` A white-list of content-types/serializers to allow.
760 | To enable dynamic submission of celery tasks, this plugin
761 | has set the option to ``['json', 'pickle']``.
762 |
763 | ``CELERY_TASK_SERIALIZER`` A string identifying the default serialization method to use.
764 | To enable dynamic submission of celery tasks, this plugin
765 | has set the option to ``'pickle'``.
766 |
767 | ================================== =========================================
768 |
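Since the plugin only applies these values as defaults, setting any of them explicitly in your application config takes precedence. A sketch (note that changing serializers may break dynamic task submission):

.. code-block:: python

    # revert the result serializer to celery's default
    app.config['CELERY_RESULT_SERIALIZER'] = 'json'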
769 |
770 | Other Customizations
771 | ++++++++++++++++++++
772 |
773 | In addition to configuration options, this plugin can be customized with specific arguments at instantiation. The following details what can be customized:
774 |
775 | * ``base_task`` - Base task object to use when creating celery tasks.
776 |
777 | The code below details how to override these customizations:
778 |
779 | .. code-block:: python
780 |
781 | from flask import Flask
782 | from flask_execute import Celery
783 | from celery import Task
784 |
785 | class MyBaseTask(Task):
786 | queue = 'hipri'
787 |
788 | app = Flask(__name__)
789 | celery = Celery(base_task=MyBaseTask)
790 | celery.init_app(app)
791 |
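With this override in place, tasks created by the plugin inherit from ``MyBaseTask``. A hypothetical usage sketch (``my_function`` is a placeholder):

.. code-block:: python

    def my_function():
        return True

    # the dispatched task inherits from MyBaseTask
    future = celery.submit(my_function)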
792 |
793 | For even more in-depth information on the module and the tools it provides, see the `API <./api.html>`_ section of the documentation.
794 |
795 |
796 | Troubleshooting
797 | ---------------
798 |
799 | Below is an evolving list of issues that developers may encounter when trying to set up the plugin. This list will grow and shrink throughout the lifecycle of this plugin. If you run into a new issue that you think should be added to this list, please file a ticket on the GitHub page for the project.
800 |
801 | 1. ``future.result()`` with a timeout never returns, and worker logs aren't available or aren't showing changes:
802 |
803 | Celery workers likely can't connect to redis. Run ``flask celery worker`` to debug the connection. See the `Prerequisites`_ section for information on installing and running redis locally.
804 |
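You can also verify that redis itself is reachable (assuming the ``redis-cli`` tool is installed):

.. code-block:: bash

    # should print PONG if redis is running locally
    ~$ redis-cli ping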
--------------------------------------------------------------------------------
/flask_execute/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | __pkg__ = 'Flask-Execute'
4 | __url__ = 'https://github.com/bprinty/Flask-Execute'
5 | __info__ = 'Simple Celery integration for Flask applications.'
6 | __author__ = 'Blake Printy'
7 | __email__ = 'bprinty@gmail.com'
8 | __version__ = '0.1.6'
9 |
10 |
11 | from .plugin import Celery ## noqa
12 | from .plugin import current_task ## noqa
13 |
--------------------------------------------------------------------------------
/flask_execute/cli.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Plugin Setup
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import re
11 | import os
12 | import sys
13 | import click
14 | import subprocess
15 | from flask import current_app
16 | from flask.cli import with_appcontext
17 |
18 |
19 | # helpers
20 | # -------
21 | class cli:
22 | """
23 |     Data structure for wrapping internal celery commands
24 | executed throughout the plugin.
25 | """
26 |
27 | @classmethod
28 | def popen(cls, cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE):
29 | """
30 | Run subprocess.popen for executing celery command in background.
31 | """
32 | args = 'celery -A flask_execute.ext.celery {}'.format(cmd).split(' ')
33 | return subprocess.Popen(args, stderr=stderr, stdout=stdout, env=os.environ.copy())
34 |
35 | @classmethod
36 | def call(cls, cmd, stderr=None, stdout=None):
37 | """
38 | Run subprocess.call for executing celery command.
39 | """
40 | args = 'celery -A flask_execute.ext.celery {}'.format(cmd).split(' ')
41 | return subprocess.call(args, stderr=stderr, stdout=stdout, env=os.environ.copy())
42 |
43 | @classmethod
44 | def output(cls, cmd, stderr=subprocess.STDOUT, **kwargs):
45 | """
46 | Run subprocess.check_output for command.
47 | """
48 | args = 'celery -A flask_execute.ext.celery {}'.format(cmd).split(' ')
49 | return subprocess.check_output(args, stderr=stderr, env=os.environ.copy(), **kwargs).decode('utf-8')
50 |
51 |
52 | # entry points
53 | # ------------
54 | @click.command('celery', context_settings=dict(
55 | ignore_unknown_options=True,
56 | ))
57 | @click.option('-h', '--help', is_flag=True, help='Returns celery cli help.')
58 | @click.argument('args', nargs=-1, type=click.UNPROCESSED)
59 | @with_appcontext
60 | def entrypoint(help, args):
61 | """
62 | Run celery command, wrapping application context and references.
63 |
64 | Examples:
65 |
66 |     .. code-block:: bash
67 |
68 | # start local worker
69 | ~$ flask celery worker
70 |
71 | # start flower monitoring tool
72 | ~$ flask celery flower
73 |
74 | # inspect worker stats
75 | ~$ flask celery inspect stats
76 |
77 | For more information on the commands available, see the celery
78 | documentation. You can also use ``-h`` to see the celery cli documentation:
79 |
80 |     .. code-block:: bash
81 |
82 | ~$ flask celery -h
83 |
84 | Along with celery commands, this CLI method also adds the
85 | ``cluster`` entrypoint for starting all workers associated
86 | with an application, alongside the ``flower`` monitoring tool.
87 |
88 | Example:
89 |
90 | # start workers (writing to worker logs), and flower (stream output)
91 | ~$ flask celery cluster
92 |
93 | # start workers, flower, and stream output from all (-f)
94 | ~$ flask celery cluster --foreground
95 |
96 | # start workers and stream output to foreground (no flower)
97 | ~$ flask celery cluster --foreground --no-flower
98 |
99 |     To change the number of workers bootstrapped by this command,
100 |     see the ``CELERY_WORKERS`` configuration option for this plugin.
101 | """
102 | # add config options
103 | if 'worker' in args:
104 | args = list(args)
105 | args.append('--loglevel={}'.format(current_app.config['CELERY_LOG_LEVEL']))
106 |
107 | # dispatch additional entry point
108 | if 'cluster' in args:
109 | if help:
110 |             message = re.sub(r'\n\s+', '\n', cluster.__doc__)
111 | sys.stderr.write('\nUsage: celery cluster [options]\n{}\n'.format(message))
112 | sys.exit(1)
113 | return cluster(args)
114 |
115 | # call command with arguments
116 | help = ' --help' if help else ''
117 | return cli.call(' '.join(args) + help)
118 |
119 |
120 | def cluster(args):
121 | """
122 | Start local cluster of celery workers, celerybeat monitor,
123 | and flower monitoring tool (if specified in configuration).
124 | See documentation for information on configuring Flower
125 | and the Celerybeat scheduler.
126 | """
127 | celery = current_app.extensions['celery']
128 |
129 | # starting configured celery workers
130 | celery.start()
131 |
132 | # check available processes
133 | if not len(celery.logs):
134 | sys.stderr.write('\nCelery cluster could not be started - workers already running or error starting workers. See worker logs for details\n')
135 | return
136 |
137 | # tail logs
138 | proc = subprocess.Popen(['tail', '-F'] + celery.logs, stdout=subprocess.PIPE)
139 | celery.processes['tail'] = proc
140 | while True:
141 | for line in iter(proc.stdout.readline, b''):
142 | sys.stderr.write(line.decode('utf-8'))
143 | return
144 |
--------------------------------------------------------------------------------
/flask_execute/ext.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Module for spinning up application context
4 | # with celery worker, using Flask's auto-detection
5 | # functionality.
6 | #
7 | # ------------------------------------------------
8 |
9 | from flask.cli import ScriptInfo
10 | info = ScriptInfo()
11 | app = info.load_app()
12 |
13 | if not hasattr(app, 'extensions') or \
14 | 'celery' not in app.extensions:
15 | raise AssertionError(
16 | 'Celery plugin has not been registered with application. '
17 | 'Please see documentation for how to configure this extension.'
18 | )
19 |
20 | plugin = app.extensions['celery']
21 | celery = plugin.controller
22 |
--------------------------------------------------------------------------------
/flask_execute/futures.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Objects for managing futures and pools of
4 | # future objects.
5 | #
6 | # ------------------------------------------------
7 |
8 |
9 | # imports
10 | # -------
11 | from celery.result import ResultSet
12 |
13 |
14 | # classes
15 | # -------
16 | class Future(object):
17 | """
18 | Wrapper around celery.AsyncResult to provide an API similar
19 | to the ``concurrent.futures`` API.
20 |
21 | Arguments:
22 | result (AsyncResult): ``celery.result.AsyncResult`` object
23 | containing task information and celery context.
24 | """
25 |
26 | def __init__(self, result):
27 | self.__proxy__ = result
28 | self.id = self.__proxy__.id
29 | return
30 |
31 | def __getattr__(self, key):
32 | return getattr(self.__proxy__, key)
33 |
34 | def result(self, timeout=None):
35 | """
36 | Wait for task to finish and return result.
37 |
38 | Arguments:
39 | timeout (int): Number of seconds to wait for
40 | result to finish before timing out and raising
41 | an error.
42 | """
43 | self.__proxy__.wait(timeout=timeout)
44 | return self.__proxy__.result
45 |
46 | def cancel(self, *args, **kwargs):
47 | """
48 | Attempt to cancel the call. If the call is currently
49 | being executed or finished running and cannot be cancelled
50 | then the method will return False, otherwise the call will
51 | be cancelled and the method will return True.
52 |
53 | Arguments and keyword arguments passed to this function will
54 | be passed into the internal AsyncResult.revoke() method.
55 | """
56 | if self.__proxy__.state in ['STARTED', 'FAILURE', 'SUCCESS', 'REVOKED']:
57 | return False
58 | kwargs.setdefault('terminate', True)
59 | kwargs.setdefault('wait', True)
60 | kwargs.setdefault('timeout', 1 if kwargs['wait'] else None)
61 | self.__proxy__.revoke(*args, **kwargs)
62 | return True
63 |
64 | def cancelled(self):
65 | """
66 | Return ``True`` if the call was successfully cancelled.
67 | """
68 | return self.__proxy__.state == 'REVOKED'
69 |
70 | def running(self):
71 | """
72 | Return ``True`` if the call is currently being
73 | executed and cannot be cancelled.
74 | """
75 | return self.__proxy__.state in ['STARTED', 'PENDING']
76 |
77 | def done(self):
78 | """
79 | Return True if the call was successfully cancelled
80 | or finished running.
81 | """
82 | return self.__proxy__.state in ['FAILURE', 'SUCCESS', 'REVOKED']
83 |
84 | def exception(self, timeout=None):
85 | """
86 |         Return the exception raised by the call. If the call hasn't yet
87 |         completed then this method will wait up to ``timeout`` seconds. If
88 |         the call raised or timed out, the resulting exception is returned;
89 |         if the call completed without raising, None is returned.
90 |
91 | Arguments:
92 | timeout (int): Number of seconds to wait for
93 | result to finish before timing out and raising
94 | an error.
95 | """
96 | try:
97 | self.__proxy__.wait(timeout=timeout)
98 | except Exception as exe:
99 | return exe
100 | return
101 |
102 | def add_done_callback(self, fn):
103 | """
104 | Attaches the callable fn to the future. fn will be called, with
105 | the task as its only argument, when the future is cancelled
106 | or finishes running.
107 |
108 | Arguments:
109 | fn (callable): Callable object to issue after task has
110 | finished executing.
111 | """
112 | self.__proxy__.then(fn)
113 | return self
114 |
115 |
116 | class FuturePool(object):
117 | """
118 | Class for managing pool of futures for grouped operations.
119 |
120 | Arguments:
121 | futures (list, tuple): Iterable of ``celery.result.AsyncResult``
122 | objects to manage as a group of tasks.
123 | """
124 |
125 | def __init__(self, futures):
126 | self.futures = futures
127 | self.__proxy__ = ResultSet([future.__proxy__ for future in futures])
128 | return
129 |
130 | def __getattr__(self, key):
131 | return getattr(self.__proxy__, key)
132 |
133 | def __iter__(self):
134 | for future in self.futures:
135 | yield future
136 | return
137 |
138 | def __len__(self):
139 | return len(self.futures)
140 |
141 | @property
142 | def status(self):
143 | """
144 | Return status of future pool.
145 | """
146 | statuses = [future.status for future in self]
147 | for stat in ['PENDING', 'STARTED', 'RETRY', 'REVOKED', 'FAILURE', 'SUCCESS']:
148 | if stat in statuses:
149 | return stat
150 | return 'SUCCESS'
151 |
152 | @property
153 | def state(self):
154 | """
155 | Return state of future pool.
156 | """
157 | return self.status
158 |
159 | def add(self, future):
160 | """
161 | Add future object to pool.
162 |
163 | Arguments:
164 | future (Future): Future object to add to pool.
165 | """
166 | if not isinstance(future, Future):
167 | raise AssertionError('No rule for adding {} type to FuturePool.'.format(type(future)))
168 | self.futures.append(future)
169 | return
170 |
171 | def result(self, timeout=0):
172 | """
173 | Wait for entire future pool to finish and return result.
174 |
175 | Arguments:
176 | timeout (int): Number of seconds to wait for
177 | result to finish before timing out and raising
178 | an error.
179 | """
180 | return [
181 | future.result(timeout=timeout)
182 | for future in self.futures
183 | ]
184 |
185 | def cancel(self, *args, **kwargs):
186 | """
187 | Cancel all running tasks in future pool. Return value will be
188 | ``True`` if *all* tasks were successfully cancelled and ``False``
189 | if *any* tasks in the pool were running or done at the time of
190 | cancellation.
191 |
192 | Arguments and keyword arguments passed to this function will
193 | be passed into the internal AsyncResult.revoke() method.
194 | """
195 | result = True
196 | for future in self.futures:
197 | result &= future.cancel(*args, **kwargs)
198 | return result
199 |
200 | def cancelled(self):
201 | """
202 | Return ``True`` if any tasks were successfully cancelled.
203 | """
204 | for future in self.futures:
205 | if future.cancelled():
206 | return True
207 | return False
208 |
209 | def running(self):
210 | """
211 | Return boolean describing if *any* tasks in future pool
212 | are still running.
213 | """
214 | for future in self.futures:
215 | if future.running():
216 | return True
217 | return False
218 |
219 | def done(self):
220 | """
221 | Return boolean describing if *all* tasks in future pool
222 | are either finished or have been revoked.
223 | """
224 | for future in self.futures:
225 | if not future.done():
226 | return False
227 | return True
228 |
229 | def exception(self):
230 | """
231 | Return exception(s) thrown by task, if any were
232 | thrown during execution.
233 | """
234 | return [
235 | future.exception()
236 | for future in self.futures
237 | ]
238 |
239 | def add_done_callback(self, fn):
240 | """
241 | Attaches the callable fn to the future pool. fn will be
242 | called, with the task as its only argument, when the
243 | future is cancelled or finishes running.
244 |
245 | Arguments:
246 | fn (callable): Callable object to issue after task has
247 | finished executing.
248 | """
249 | self.__proxy__.then(fn)
250 | return self
251 |
--------------------------------------------------------------------------------
/flask_execute/managers.py:
--------------------------------------------------------------------------------
1 | #
2 | # All manager objects for complex operations provided
3 | # by plugin properties.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import re
11 | import sys
12 | import json
13 | import subprocess
14 | from celery.schedules import crontab
15 |
16 | from .cli import cli
17 |
18 |
19 | # classes
20 | # -------
21 | class TaskManager(object):
22 | """
23 | Object for managing registered celery tasks, providing
24 | users a way of submitting tasks via the celery API when
25 | using the factory pattern for configuring a flask application.
26 |
27 | This proxy for the @celery.task decorator is designed to
28 | manage two things:
29 |
30 | 1. For applications set up with the flask application directly,
31 | register tasks with the celery application directly. This
32 | has the same effect as the original mechanism for configuring
33 | celery alongside a Flask application.
34 |
35 | 2. For applications set up using the factory pattern,
36 | store all registered tasks internally so they can be
37 | registered with the celery application once the plugin
38 |        has been initialized with a flask application instance.
39 | """
40 |
41 | def __init__(self):
42 | self.__app__ = None
43 | self.__registered__ = {}
44 | self.__tasks__ = {}
45 | self.__funcs__ = {}
46 | return
47 |
48 | def __call__(self, *args, **kwargs):
49 | """
50 | Internal decorator logic for ``celery.task``.
51 | """
52 | # plugin hasn't been initialized
53 | if self.__app__ is None:
54 | def _(func):
55 | self.__registered__[func.__name__] = {'func': func, 'args': args, 'kwargs': kwargs}
56 | return func
57 |
58 | # plugin has been initialized
59 | else:
60 | def _(func):
61 | func = self.__app__.task(*args, **kwargs)(func)
62 | if func.name not in self.__tasks__:
63 | self.__tasks__[func.name] = func
64 | self.__funcs__[func.__name__] = func
65 | return func
66 |
67 | # return decorated function if called directly
68 | if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
69 | func, args = args[0], args[1:]
70 | return _(func)
71 |
72 | # return inner decorator
73 | else:
74 | return _
75 |
76 | def __getattr__(self, key):
77 | if key not in self.__tasks__:
78 | if key not in self.__funcs__:
79 | raise AttributeError('Task ``{}`` has not been registered'.format(key))
80 | return self.__funcs__[key]
81 | return self.__tasks__[key]
82 |
83 | def __getitem__(self, key):
84 | return self.__getattr__(key)
85 |
86 | def init_celery(self, controller):
87 | """
88 | Initialize the task manager with a celery controller. This
89 | will register all decorated tasks with the specified
90 | ``controller`` (celery application).
91 |
92 | Args:
93 | controller (Celery): Celery application instance to
94 | register tasks for.
95 | """
96 | self.__app__ = controller
97 | for key, item in self.__registered__.items():
98 | if not len(item['args']) and not len(item['kwargs']):
99 | self(item['func'])
100 | else:
101 | self(*item['args'], **item['kwargs'])(item['func'])
102 | return
103 |
104 |
105 | class ScheduleManager(object):
106 | """
107 | Object for managing scheduled celery tasks, providing
108 | users a way of scheduling tasks via the celery API when
109 | using the factory pattern for configuring a flask application.
110 |
111 |     This proxy for the @celery.schedule decorator is designed to
112 | manage two things:
113 |
114 | 1. For applications set up with the flask application directly,
115 | schedule tasks with the celery application directly. This
116 | has the same effect as the original mechanism for configuring
117 | celery alongside a Flask application.
118 |
119 | 2. For applications set up using the factory pattern,
120 | store all scheduled tasks internally so they can be
121 | registered with the celery application once the plugin
122 |        has been initialized with a flask application instance.
123 | """
124 |
125 | def __init__(self):
126 | self.__app__ = None
127 | self.__registered__ = {}
128 | self.__tasks__ = {}
129 | self.__funcs__ = {}
130 | return
131 |
132 | def __call__(self, schedule=None, name=None, args=tuple(), kwargs=dict(), options=dict(), **skwargs):
133 | """
134 | Internal decorator logic for ``celery.schedule``.
135 | """
136 | # handle ambiguous schedule input
137 | if schedule is not None and len(skwargs):
138 | raise AssertionError(
139 | 'Invalid schedule arguments - please see documentation for '
140 | 'how to use @celery.schedule'
141 | )
142 |
143 | # handle crontab input
144 | if schedule is None and len(skwargs):
145 | schedule = crontab(**skwargs)
146 |
147 | # handle missing schedule input
148 | if schedule is None:
149 | raise AssertionError('Schedule for periodic task must be defined, either via numeric arguments or crontab keywords. See documentation for details.')
150 |
151 | # plugin hasn't been initialized
152 | if self.__app__ is None:
153 | def _(func):
154 | key = name or func.__module__ + '.' + func.__name__
155 | self.__registered__[key] = {
156 | 'func': func,
157 | 'schedule': schedule,
158 | 'args': args,
159 | 'kwargs': kwargs,
160 | 'options': options,
161 | }
162 | return func
163 |
164 | # plugin has been initialized
165 | else:
166 | def _(func):
167 | if not hasattr(func, 'name'):
168 | func = self.__app__.task(func)
169 |
170 | # add schedule to beat manager
171 |                 self.__app__.conf['CELERYBEAT_SCHEDULE'][name or func.name] = {
172 | 'task': func.name,
173 | 'schedule': schedule,
174 | 'args': args,
175 | 'kwargs': kwargs,
176 | 'options': options
177 | }
178 |
179 | # save in scheduled registry
180 | if func.name not in self.__tasks__:
181 | self.__tasks__[func.name] = func
182 | self.__funcs__[func.__name__] = func
183 |
184 | return func
185 |
186 | # return inner decorator
187 | return _
188 |
189 | def __getattr__(self, key):
190 | if key not in self.__tasks__:
191 | if key not in self.__funcs__:
192 | raise AttributeError('Task {} has not been registered'.format(key))
193 | return self.__funcs__[key]
194 | return self.__tasks__[key]
195 |
196 | def __getitem__(self, key):
197 | return self.__getattr__(key)
198 |
199 | def init_celery(self, controller):
200 | """
201 | Initialize the task manager with a celery controller. This
202 | will register all decorated tasks with the specified
203 | ``controller`` (celery application).
204 |
205 | Args:
206 | controller (Celery): Celery application instance to
207 | register tasks for.
208 | """
209 | self.__app__ = controller
210 | for key, item in self.__registered__.items():
211 | self(
212 | schedule=item['schedule'],
213 | args=item['args'],
214 | kwargs=item['kwargs'],
215 | options=item['options'],
216 | name=key,
217 | )(item['func'])
218 | return
219 |
220 |
221 | class CommandManager(object):
222 | """
223 | Manager for issuing celery ``inspect`` or ``control`` calls
224 | to the celery API.
225 |
226 | Example:
227 |
228 | .. code-block:: python
229 |
230 | >>> inspect = CommandManager('inspect')
231 |
232 | # no options
233 | >>> inspect.active()
234 |
235 | # options
236 | >>> inspect.active(timeout=5, destination=['w1@e.com', 'w2@e.com'])
237 |
238 | This tool is primarily used alongside the ``Celery`` plugin
239 | object, allowing developers to issue celery commands via
240 | property.
241 |
242 | Examples:
243 |
244 | .. code-block:: python
245 |
246 | >>> celery = Celery(app)
247 |
248 | # ``inspect`` command manager.
249 | >>> celery.inspect.ping()
250 | {'worker@localhost': {'ok': 'pong'}}
251 |
252 | # ``control`` command manager.
253 | >>> celery.control.pool_shrink(1)
254 | {'worker@localhost': {'ok': 'pool will shrink'}}
255 | >>> celery.control.shutdown()
256 | Shutdown signal sent to workers.
257 |
258 | Use ``celery.inspect.help()`` and ``celery.control.help()`` to see
259 | available celery commands.
260 | """
261 |
262 | def __init__(self, name):
263 | self.name = name
264 | return
265 |
266 | def __getattr__(self, key):
267 | def _(*args, **kwargs):
268 | return self.call(self.name + ' ' + key, *args, **kwargs)
269 | return _
270 |
271 | def __getitem__(self, key):
272 | return self.__getattr__(key)
273 |
274 | def help(self):
275 | """
276 | Return help message for specific command.
277 | """
278 | output = cli.output(self.name + ' --help', stderr=None)
279 | sys.stderr.write('\n>>> celery.' + self.name + '.command()\n\n')
280 | sys.stderr.write('Issue celery command to {} workers.\n\n'.format(self.name))
281 | sys.stderr.write('Commands:')
282 | for line in output.split('\n'):
283 | if line and line[0] == '|':
284 | sys.stderr.write(line + '\n')
285 | return
286 |
287 | def call(self, cmd, timeout=None, destination=None, quiet=False):
288 | """
289 | Issue celery subcommand and return output.
290 |
291 | Args:
292 | cmd (str): Command to call.
293 | timeout (float): Timeout in seconds (float) waiting for reply.
294 | destination (str, list): List of destination node names.
295 | """
296 | cmd += ' --json'
297 |
298 | # parse timeout
299 | if timeout is not None:
300 | cmd += ' --timeout={}'.format(timeout)
301 |
302 | # parse destination
303 | if destination is not None:
304 | if isinstance(destination, str):
305 | destination = destination.split(',')
306 | cmd += ' --destination={}'.format(','.join(destination))
307 |
308 | # make call accounting for forced error
309 | try:
310 | output = cli.output(cmd)
311 | except subprocess.CalledProcessError as err:
312 | if not quiet:
313 | if 'shutdown' in cmd:
314 | print('Shutdown signal sent to workers.')
315 | else:
316 | print(err.stdout.decode('utf-8'))
317 | return
318 |
319 | # make call and parse result
320 | output = output.split('\n')[0]
321 | try:
322 | data = json.loads(output)
323 | except json.JSONDecodeError:
324 | data = {}
325 | if isinstance(data, (list, tuple)):
326 | result = {}
327 | for item in data:
328 | result.update(item)
329 | data = result
330 | return data
331 |
--------------------------------------------------------------------------------
/flask_execute/plugin.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Plugin Setup
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import os
11 | import atexit
12 | import subprocess
13 | from datetime import datetime
14 | from flask import g, current_app
15 | from werkzeug.local import LocalProxy
16 | from celery import Celery as CeleryFactory
17 |
18 | from .cli import cli, entrypoint
19 | from .futures import Future, FuturePool
20 | from .managers import CommandManager, TaskManager, ScheduleManager
21 |
22 |
23 | # config
24 | # ------
25 | PROCESSES = {}
26 |
27 |
28 | # proxies
29 | # -------
30 | def get_current_db():
31 | if hasattr(current_app, 'extensions') and \
32 | 'sqlalchemy' in current_app.extensions:
33 | return current_app.extensions['sqlalchemy'].db
34 | else:
35 | return None
36 |
37 |
38 | def get_current_task():
39 | """
40 | Local proxy getter for managing current task and
41 | associated operations (i.e. state management).
42 | """
43 | if 'task' not in g:
44 | return None
45 | else:
46 | return g.task
47 |
48 |
49 | current_db = LocalProxy(get_current_db)
50 | current_task = LocalProxy(get_current_task)
51 |
52 |
53 | # helpers
54 | # -------
55 | def dispatch(func, *args, **kwargs):
56 | """
57 | Dynamic abstracted task for pre-registration of
58 | celery tasks.
59 |
60 | Arguments:
61 | func (callable): Function to deserialize and call.
62 | *args (list, tuple): Arguments to pass to function.
63 | **kwargs (dict): Keyword arguments to pass to function.
64 | """
65 | return func(*args, **kwargs)
66 |
67 |
68 | @atexit.register
69 | def stop_processes(timeout=None):
70 | """
71 | Clean all processes spawned by this plugin.
72 |
73 | timeout (int): Timeout to wait for processes to join
74 | after termination signal is sent.
75 | """
76 | global PROCESSES
77 | for key in PROCESSES:
78 | PROCESSES[key].terminate()
79 | PROCESSES[key].wait(timeout=timeout)
80 | return
81 |
82 |
83 | def requery(args, kwargs):
84 | """
85 | Re-query database models passed into function to enable
86 | safer threaded operations. This is typically reserved
87 |     for api methods that submit asynchronous tasks to a separate
88 | executor that uses a different database session.
89 | """
90 | # get local db proxy
91 | db = current_db
92 |
93 | # re-query processor
94 | def process(obj):
95 | if db is not None and hasattr(db, 'Model'):
96 | if isinstance(obj, db.Model):
97 | if hasattr(obj, 'id'):
98 | obj = obj.__class__.query.filter_by(id=obj.id).first()
99 | return obj
100 |
101 | # arg processing
102 | args = list(args)
103 | for idx, arg in enumerate(args):
104 | args[idx] = process(arg)
105 |
106 | # kwarg processing
107 | for key in kwargs:
108 | kwargs[key] = process(kwargs[key])
109 |
110 | return args, kwargs
111 |
112 |
113 | def deproxy(args, kwargs):
114 | """
115 | Query local objects from proxies passed into function for
116 | safer threaded operations.
117 | """
118 | # deproxy processor
119 | def process(obj):
120 | if isinstance(obj, LocalProxy):
121 | obj = obj._get_current_object()
122 | return obj
123 |
124 | # arg processing
125 | args = list(args)
126 | for idx, arg in enumerate(args):
127 | args[idx] = process(arg)
128 |
129 | # kwarg processing
130 | for key in kwargs:
131 | kwargs[key] = process(kwargs[key])
132 |
133 | return args, kwargs
134 |
135 |
136 | # plugin
137 | # ------
138 | class Celery(object):
139 | """
140 | Plugin for managing celery task execution in Flask.
141 |
142 | Arguments:
143 | app (Flask): Flask application object.
144 | base_task (celery.Task): Celery task object to
145 | use as base task for celery operations.
146 | """
147 |
148 | def __init__(self, app=None, base_task=None):
149 | self._started = False
150 |         self.base_task = base_task
151 | self.logs = []
152 | self.task = TaskManager()
153 | self.schedule = ScheduleManager()
154 | self.inspect = CommandManager('inspect')
155 | self.control = CommandManager('control')
156 | if app is not None:
157 | self.init_app(app)
158 | return
159 |
160 | def init_app(self, app):
161 |
162 | # defaults
163 | self.app = app
164 | self.app.config.setdefault('CELERY_BROKER_URL', 'redis://localhost:6379')
165 | self.app.config.setdefault('CELERY_RESULT_BACKEND', 'redis://')
166 | self.app.config.setdefault('CELERY_WORKERS', 1)
167 | self.app.config.setdefault('CELERY_START_LOCAL_WORKERS', True)
168 | self.app.config.setdefault('CELERY_START_TIMEOUT', 10)
169 | self.app.config.setdefault('CELERY_ACCEPT_CONTENT', ['json', 'pickle'])
170 | self.app.config.setdefault('CELERY_TASK_SERIALIZER', 'pickle')
171 | self.app.config.setdefault('CELERY_RESULT_SERIALIZER', 'pickle')
172 | self.app.config.setdefault('CELERY_SANITIZE_ARGUMENTS', True)
173 | self.app.config.setdefault('CELERY_ALWAYS_EAGER', False)
174 | self.app.config.setdefault('CELERY_LOG_LEVEL', 'info')
175 | self.app.config.setdefault('CELERY_LOG_DIR', os.getcwd())
176 | self.app.config.setdefault('CELERY_FLOWER', True)
177 | self.app.config.setdefault('CELERY_FLOWER_PORT', 5555)
178 | self.app.config.setdefault('CELERY_FLOWER_ADDRESS', '127.0.0.1')
179 | self.app.config.setdefault('CELERY_SCHEDULER', True)
180 | self.app.config.setdefault('CELERYBEAT_SCHEDULE', {})
181 |
182 | # set up controller
183 | self.controller = CeleryFactory(
184 | self.app.name,
185 | backend=self.app.config['CELERY_RESULT_BACKEND'],
186 | broker=self.app.config['CELERY_BROKER_URL'],
187 | )
188 | for key in self.app.config:
189 | self.controller.conf[key] = self.app.config[key]
190 | if self.base_task is not None:
191 | self.controller.Task = self.base_task
192 |
193 | # add custom task wrapping app context
194 | class ContextTask(self.controller.Task):
195 | """
196 | Custom celery task object that creates application context
197 | before dispatching celery command.
198 | """
199 | config = self.app.config
200 |
201 | def __call__(self, *args, **kwargs):
202 | # if eager, run without creating new context
203 | if self.config['CELERY_ALWAYS_EAGER']:
204 | g.task = self.request
205 | return self.run(*args, **kwargs)
206 |
207 | # otherwise, create new context and run the command
208 | else:
209 | from flask.cli import ScriptInfo
210 | info = ScriptInfo()
211 | app = info.load_app()
212 | with app.app_context():
213 | g.task = self.request
214 | if self.config['CELERY_SANITIZE_ARGUMENTS']:
215 | args, kwargs = requery(args, kwargs)
216 | return self.run(*args, **kwargs)
217 |
218 | self.controller.Task = ContextTask
219 |
220 | # link celery extension to registered application
221 | if not hasattr(self.app, 'extensions'):
222 | self.app.extensions = {}
223 | self.app.extensions['celery'] = self
224 |
225 | # register dynamic task
226 | self.wrapper = self.controller.task(dispatch)
227 | self.task.init_celery(self.controller)
228 | self.schedule.init_celery(self.controller)
229 |
230 | # register cli entry points
231 | self.app.cli.add_command(entrypoint)
232 | return
233 |
234 | @property
235 | def processes(self):
236 | """
237 | Proxy with list of all subprocesses managed by the plugin.
238 | """
239 | global PROCESSES
240 | return PROCESSES
241 |
242 | def start(self, timeout=None):
243 | """
244 | Start local celery workers specified in config.
245 |
246 | Arguments:
247 | timeout (int): Timeout to wait for processes to start
248 | after process is submitted. ``celery status`` is
249 | used to poll the status of workers.
250 | """
251 | timeout = timeout or self.app.config['CELERY_START_TIMEOUT']
252 | running = self.status()
253 |
254 | # TODO: Add ability to specify worker configuration via nested config option
255 |
256 | # reformat worker specification
257 | worker_args = {}
258 | if isinstance(self.app.config['CELERY_WORKERS'], int):
259 | workers = [
260 | 'worker{}'.format(i + 1)
261 | for i in range(self.app.config['CELERY_WORKERS'])
262 | ]
263 | elif isinstance(self.app.config['CELERY_WORKERS'], (list, tuple)):
264 | workers = self.app.config['CELERY_WORKERS']
265 | elif isinstance(self.app.config['CELERY_WORKERS'], dict):
266 | worker_args = self.app.config['CELERY_WORKERS']
267 | workers = list(worker_args.keys())
268 | else:
269 | raise AssertionError(
270 | 'No rule for processing input type {} for `CELERY_WORKERS` '
271 | 'option.'.format(type(self.app.config['CELERY_WORKERS'])))
272 |
273 | # make sure log directory exists
274 | if not os.path.exists(self.app.config['CELERY_LOG_DIR']):
275 | os.makedirs(self.app.config['CELERY_LOG_DIR'])
276 |
277 | # start flower (if specified)
278 | if self.app.config['CELERY_FLOWER']:
279 | logfile = os.path.join(self.app.config['CELERY_LOG_DIR'], 'flower.log')
280 | self.logs.append(logfile)
281 | self.processes['flower'] = cli.popen(
282 | 'flower --address={} --port={} --logging={} --log-file-prefix={}'.format(
283 | self.app.config['CELERY_FLOWER_ADDRESS'],
284 | self.app.config['CELERY_FLOWER_PORT'],
285 | self.app.config['CELERY_LOG_LEVEL'],
286 | logfile
287 | )
288 | )
289 |
290 | # start celerybeat (if specified and tasks registered)
291 | if self.app.config['CELERY_SCHEDULER'] and len(self.app.config['CELERYBEAT_SCHEDULE']):
292 | logfile = os.path.join(self.app.config['CELERY_LOG_DIR'], 'scheduler.log')
293 | pidfile = os.path.join(self.app.config['CELERY_LOG_DIR'], 'scheduler.pid')
294 | schedule = os.path.join(self.app.config['CELERY_LOG_DIR'], 'scheduler.db')
295 | self.logs.append(logfile)
296 | self.processes['scheduler'] = cli.popen(
297 | 'beat --loglevel={} --logfile={} --pidfile={} --schedule={}'.format(
298 | current_app.config['CELERY_LOG_LEVEL'],
299 | logfile, pidfile, schedule
300 | )
301 | )
302 |
303 | # spawn local workers
304 | for worker in workers:
305 |
306 | # don't start worker if already running
307 | available = False
308 | for name, status in running.items():
309 | if worker + '@' in name:
310 | available = status == 'OK'
311 | break
312 | if available:
313 | continue
314 |
315 | # configure extra arguments
316 | if worker not in worker_args:
317 | worker_args[worker] = {}
318 |
319 | # configure logging
320 | level = self.app.config['CELERY_LOG_LEVEL']
321 | logfile = os.path.join(self.app.config['CELERY_LOG_DIR'], worker + '.log')
322 | self.logs.append(logfile)
323 |
324 | # configure worker arg defaults
325 | worker_args[worker].setdefault('loglevel', level)
326 | worker_args[worker].setdefault('hostname', worker + '@%h')
327 | worker_args[worker].setdefault('logfile', logfile)
328 |
329 | # set up command using worker args
330 | cmd = 'worker'
331 | for key, value in worker_args[worker].items():
332 | cmd += ' --{}={}'.format(key, value)
333 |
334 | # start worker
335 | self.processes[worker] = cli.popen(cmd)
336 |
337 | # wait for workers to start
338 | then, delta = datetime.now(), 0
339 | while delta < timeout:
340 | delta = (datetime.now() - then).seconds
341 | if self.status():
342 | break
343 | if delta >= timeout:
344 | raise AssertionError(
345 | 'Could not connect to celery workers after {} seconds. '
346 | 'See worker logs for details.'.format(timeout)
347 | )
348 | return
349 |
350 | def stop(self, timeout=None):
351 | """
352 | Stop all processes started by this plugin.
353 |
354 | Arguments:
355 | timeout (int): Timeout to wait for processes to join
356 | after termination signal is sent.
357 | """
358 | timeout = timeout or self.app.config['CELERY_START_TIMEOUT']
359 | return stop_processes(timeout=timeout)
360 |
361 | def submit(self, func, *args, **kwargs):
362 | """
363 | Submit function to celery worker for processing.
364 |
365 | Arguments:
366 | queue (str): Name of queue to submit function to.
367 | countdown (int): Number of seconds to wait before
368 | submitting function.
369 | eta (datetime): Datetime object describing when
370 | task should be executed.
371 | retry (bool): Whether or not to retry the task
372 | upon failure.
373 | *args (list): Arguments to function.
374 | **kwargs (dict): Keyword arguments to function.
375 | """
376 | options = {}
377 | for key in ['queue', 'countdown', 'eta', 'retry']:
378 | if key in kwargs:
379 | options[key] = kwargs.pop(key)
380 |
381 | return self.apply(func, args=args, kwargs=kwargs, **options)
382 |
383 | def apply(self, func, args=tuple(), kwargs=dict(), **options):
384 | """
385 | Submit function to celery worker for processing.
386 |
387 | Arguments:
388 | args (list): Arguments to function.
389 | kwargs (dict): Keyword arguments to function.
390 | **options (dict): Arbitrary celery options to pass to
391 | underlying ``apply_async()`` function. See celery
392 | documentation for details.
393 | """
394 | # start celery if first ``submit()`` call.
395 | if not self.app.config['CELERY_ALWAYS_EAGER'] and \
396 | self.app.config['CELERY_START_LOCAL_WORKERS'] and \
397 | not self._started:
398 | self.start()
399 | self._started = True
400 |
401 | # reimport function for serialization if not using flask cli
402 | if '__main__' in func.__module__:
403 | mod = func.__module__.replace('__main__', self.app.name)
404 | app = __import__(mod, fromlist=[func.__name__])
405 | func = getattr(app, func.__name__)
406 |
407 | # submit task and sanitize inputs
408 | args, kwargs = deproxy(args, kwargs)
409 | args.insert(0, func)
410 | return Future(self.wrapper.apply_async(args=args, kwargs=kwargs, **options))
411 |
412 | def map(self, func, *args, **kwargs):
413 | """
414 | Submit function with iterable of arguments to celery and
415 | return ``FuturePool`` object containing all task result
416 | ``Future`` objects.
417 |
418 | Arguments:
419 | queue (str): Name of queue to submit function to.
420 | countdown (int): Number of seconds to wait before
421 | submitting function.
422 | eta (datetime): Datetime object describing when
423 | task should be executed.
424 | retry (bool): Whether or not to retry the task
425 | upon failure.
426 | *args (list): list of arguments to pass to functions.
427 | **kwargs (dict): Keyword arguments to apply for every
428 | function.
429 | """
430 | futures = []
431 | for arg in args:
432 | futures.append(self.submit(func, *arg, **kwargs))
433 | return FuturePool(futures)
434 |
435 | def get(self, ident):
436 | """
437 | Retrieve a Future object for the specified task.
438 |
439 | Arguments:
440 | ident (str): Identifier for task to query.
441 | """
442 | from celery.result import AsyncResult
443 | task = AsyncResult(ident)
444 | return Future(task)
445 |
446 | def status(self, timeout=5):
447 | """
448 | Return status of celery server as dictionary.
449 | """
450 | workers = {}
451 | try:
452 | output = cli.output('status', timeout=timeout)
453 | for stat in output.split('\n'):
454 | if '@' in stat:
455 | worker, health = stat.split(': ')
456 | workers[worker] = health
457 | except subprocess.CalledProcessError:
458 | pass
459 | return workers
460 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | celery
3 | flower
4 | redis
5 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_wheel]
2 | universal = 1
3 |
4 | [nosetests]
5 | verbosity=2
6 | nocapture=1
7 |
8 | [flake8]
9 | max-complexity=50
10 | max-line-length=175
11 | exclude=.git,tests,.venv,build
12 |
13 | [aliases]
14 | test = pytest
15 |
16 | [tool:pytest]
17 | addopts = --cov-report term --cov=flask_execute -v -s --ignore=tests/isolated
18 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Package setup
5 | #
6 | # ------------------------------------------------
7 |
8 |
9 | # imports
10 | # -------
11 | import re
12 | try:
13 | from setuptools import setup, find_packages
14 | except ImportError:
15 | from distutils.core import setup, find_packages
16 |
17 |
18 | # config
19 | # ------
20 | class Config:
21 | def __init__(self, fi):
22 | with open(fi) as meta:
23 | for m in re.findall(r'(__[a-z]+__).*=.*[\'"](.+)[\'"]', meta.read()):
24 | setattr(self, m[0], m[1])
25 | return
26 |
27 |
28 | config = Config('flask_execute/__init__.py')
29 |
30 |
31 | # requirements
32 | # ------------
33 | with open('requirements.txt', 'r') as reqs:
34 | requirements = list(map(lambda x: x.rstrip(), reqs.readlines()))
35 |
36 | test_requirements = [
37 | 'pytest',
38 | 'pytest-cov',
39 | 'pytest-runner'
40 | ]
41 |
42 |
43 | # readme
44 | # ------
45 | with open('README.rst') as readme_file:
46 | readme = readme_file.read()
47 |
48 |
49 | # exec
50 | # ----
51 | setup(
52 | name=config.__pkg__,
53 | version=config.__version__,
54 | description=config.__info__,
55 | long_description=readme,
56 | author=config.__author__,
57 | author_email=config.__email__,
58 | url=config.__url__,
59 | packages=find_packages(exclude=['tests']),
60 | license="MIT",
61 | zip_safe=False,
62 | include_package_data=True,
63 | platforms="any",
64 | install_requires=requirements,
65 | keywords=[
66 | config.__pkg__.lower(), 'flask', 'celery', 'executor', 'execute',
67 | 'dispatch', 'dask', 'distributed', 'futures', 'concurrent.futures',
68 | 'remote', 'executor', 'flask-celery', 'task', 'queue'
69 | ],
70 | classifiers=[
71 | 'Environment :: Web Environment',
72 | 'Intended Audience :: Developers',
73 | 'License :: OSI Approved :: MIT License',
74 | 'Operating System :: OS Independent',
75 | 'Programming Language :: Python',
76 | 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
77 | 'Topic :: Software Development :: Libraries :: Python Modules',
78 | 'Natural Language :: English',
79 | "Programming Language :: Python :: 2",
80 | 'Programming Language :: Python :: 2.6',
81 | 'Programming Language :: Python :: 2.7',
82 | 'Programming Language :: Python :: 3',
83 | 'Programming Language :: Python :: 3.3',
84 | 'Programming Language :: Python :: 3.4',
85 | 'Programming Language :: Python :: 3.5',
86 | 'Programming Language :: Python :: 3.6',
87 | 'Programming Language :: Python :: 3.7',
88 | ],
89 | tests_require=test_requirements
90 | )
91 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import os
4 |
5 | TESTS = os.path.dirname(os.path.realpath(__file__))
6 | BASE = os.path.realpath(os.path.join(TESTS, '..'))
7 | RESOURCES = os.path.join(TESTS, 'resources')
8 | SANDBOX = os.path.join(TESTS, 'sandbox')
9 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Pytest configuration
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import os
11 | import pytest
12 | import logging
13 |
14 | from .fixtures import db, create_app, add
15 |
16 |
17 | # config
18 | # ------
19 | SETTINGS = dict(
20 | teardown=True,
21 | echo=False,
22 | )
23 | APP = None
24 | CLIENT = None
25 | logging.basicConfig(level=logging.ERROR)
26 | os.environ['FLASK_APP'] = 'tests.fixtures:create_app'
27 |
28 |
29 | # plugins
30 | # -------
31 | def pytest_addoption(parser):
32 | parser.addoption("-N", "--no-teardown", default=False, help="Do not tear down sandbox directory after testing session.")
33 | parser.addoption("-E", "--echo", default=False, help="Be verbose in query logging.")
34 | return
35 |
36 |
37 | def pytest_configure(config):
38 |     global SETTINGS
39 | SETTINGS['teardown'] = not config.getoption('-N')
40 | SETTINGS['echo'] = config.getoption('-E')
41 | return
42 |
43 |
44 | @pytest.fixture(autouse=True, scope='session')
45 | def sandbox(request):
46 | from . import SANDBOX
47 | global SETTINGS, APP, CLIENT
48 |
49 | # create sandbox for testing
50 | if not os.path.exists(SANDBOX):
51 | os.makedirs(SANDBOX)
52 |
53 | yield
54 |
55 | # teardown sandbox
56 | if SETTINGS['teardown']:
57 | import shutil
58 | shutil.rmtree(SANDBOX)
59 | return
60 |
61 |
62 | @pytest.fixture(scope='session')
63 | def application(sandbox):
64 | from . import SANDBOX
65 | global SETTINGS, APP, CLIENT
66 |
67 | # create application
68 | app = create_app()
69 | if SETTINGS['echo']:
70 | app.config['SQLALCHEMY_ECHO'] = True
71 |
72 | # create default user
73 | with app.app_context():
74 | db.drop_all()
75 | db.create_all()
76 | yield app
77 | return
78 |
79 |
80 | @pytest.fixture(scope='session')
81 | def client(application):
82 | global CLIENT
83 | if CLIENT is not None:
84 | yield CLIENT
85 | else:
86 | with application.test_client() as CLIENT:
87 | yield CLIENT
88 | return
89 |
90 |
91 | @pytest.fixture(scope='session')
92 | def celery(application, client):
93 | celery = application.extensions['celery']
94 | future = celery.submit(add, 1, 2)
95 | result = future.result(timeout=5)
96 | assert result == 3
97 |
98 | yield celery
99 |
100 | celery.stop()
101 | celery.control.shutdown(quiet=True)
102 | return
103 |
--------------------------------------------------------------------------------
/tests/fixtures.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Fixtures for administration.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import os
11 | import time
12 | from flask import Flask, Blueprint, jsonify
13 | from flask_sqlalchemy import SQLAlchemy
14 |
15 | from flask_execute import Celery, current_task
16 |
17 | from . import SANDBOX
18 |
19 |
20 | # helpers
21 | # -------
22 | class timeout(object):
23 |
24 | def __init__(self, seconds):
25 | self.seconds = seconds
26 |
27 | def __enter__(self):
28 | self.expiration = time.time() + self.seconds
29 | return self
30 |
31 | def __exit__(self, type, value, traceback):
32 | pass
33 |
34 | def tick(self, seconds=1):
35 | time.sleep(seconds)
36 | return
37 |
38 | @property
39 | def expired(self):
40 | return time.time() > self.expiration
41 |
42 |
43 | # plugins
44 | # -------
45 | db = SQLAlchemy()
46 | celery = Celery()
47 | api = Blueprint('api', __name__)
48 |
49 |
50 | # configs
51 | # -------
52 | class Config:
53 | SQLALCHEMY_TRACK_MODIFICATIONS = False
54 | SQLALCHEMY_ECHO = False
55 | PROPAGATE_EXCEPTIONS = False
56 | SQLALCHEMY_DATABASE_URI = 'sqlite:///{}/dev.db'.format(SANDBOX)
57 | CELERY_LOG_DIR = SANDBOX
58 | CELERY_WORKERS = ['quorra']
59 | CELERYD_CONCURRENCY = 4
60 |
61 |
62 | # factory
63 | # -------
64 | def create_app():
65 | """
66 | Application factory to use for spinning up development
67 | server tests.
68 | """
69 | app = Flask(__name__)
70 | app.config.from_object(Config)
71 | db.init_app(app)
72 | celery.init_app(app)
73 | app.register_blueprint(api)
74 | return app
75 |
76 |
77 | # functions
78 | # ---------
79 | def sleep(n=5):
80 | import time
81 | for count in range(n):
82 | time.sleep(0.1)
83 | return True
84 |
85 |
86 | def add(*args):
87 | from functools import reduce
88 | import time
89 | time.sleep(0.1)
90 | return reduce(lambda x, y: x + y, args)
91 |
92 |
93 | def fail():
94 | raise AssertionError('fail')
95 |
96 |
97 | def task_id():
98 | return current_task.id
99 |
100 |
101 | @celery.task
102 | def registered():
103 | return True
104 |
105 |
106 | @celery.schedule(hour=0, minute=0, name='scheduled-task')
107 | # @celery.schedule(5, name='scheduled-task')
108 | def scheduled():
109 | return True
110 |
111 |
112 | # endpoints
113 | # ---------
114 | @api.route('/submit', methods=['POST'])
115 | def submit():
116 | pool = celery.map(add, [1, 2], [3, 4])
117 | pool.add(celery.submit(fail))
118 | pool.add(celery.submit(sleep))
119 | pool.add(celery.submit(task_id))
120 | return jsonify([future.id for future in pool])
121 |
122 |
123 | @api.route('/monitor/', methods=['GET'])
124 | def monitor(ident):
125 | return jsonify(status=celery.get(ident).status)
126 |
127 |
128 | @api.route('/ping', methods=['POST'])
129 | def ping():
130 | result = celery.submit(add, 1, 1).result(timeout=1)
131 | return jsonify(result='pong' if result == 2 else 'miss')
132 |
133 |
134 | # models
135 | # ------
136 | class Item(db.Model):
137 | __tablename__ = 'items'
138 |
139 | id = db.Column(db.Integer, primary_key=True)
140 | name = db.Column(db.String)
141 |
--------------------------------------------------------------------------------
/tests/isolated/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bprinty/Flask-Execute/9406914a7996e73034db94f15b79f70e246d7084/tests/isolated/__init__.py
--------------------------------------------------------------------------------
/tests/isolated/test_direct.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Testing for application pattern setup differences.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import os
11 | import requests
12 | import subprocess
13 | import pytest
14 | import signal
15 |
16 | from ..fixtures import timeout
17 |
18 |
19 | # fixtures
20 | # --------
21 | @pytest.fixture(scope='session')
22 | def app(sandbox):
23 | # start application
24 | proc = subprocess.Popen(
25 |         'FLASK_ENV=development FLASK_APP=tests.resources.direct '
26 | 'flask run --host=127.0.0.1 --port=5000',
27 | stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
28 |
29 | # wait for server to start
30 | with timeout(10) as to:
31 | while True:
32 | try:
33 | response = requests.get('http://127.0.0.1:5000')
34 | assert response.json() == {'status': 'ok'}
35 | break
36 | except requests.exceptions.ConnectionError:
37 | pass
38 | assert not to.expired, 'Flask application did not start - check for port collisions.'
39 |
40 | yield
41 |
42 | # stop cluster process (can't just use proc.terminate()
43 | # because of the forking process)
44 | os.killpg(os.getpgid(proc.pid), signal.SIGINT)
45 | return
46 |
47 |
48 | # tests
49 | # -----
50 | def test_pattern(app):
51 | # ping
52 | response = requests.get('http://127.0.0.1:5000/ping')
53 | assert response.json() == {'msg': 'pong'}
54 |
55 | # task
56 | response = requests.get('http://127.0.0.1:5000/task')
57 | assert response.json() == {'success': True}
58 |
59 | # wait for flower availability
60 | with timeout(10) as to:
61 | while True:
62 | try:
63 | response = requests.get('http://127.0.0.1:5555/api/workers')
64 | assert True
65 | break
66 | except requests.exceptions.ConnectionError:
67 | pass
68 | assert not to.expired, 'Could not access flower.'
69 | return
70 |
--------------------------------------------------------------------------------
/tests/isolated/test_factory.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Testing for application pattern setup differences.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import os
11 | import requests
12 | import subprocess
13 | import pytest
14 | import signal
15 |
16 | from ..fixtures import timeout
17 |
18 |
19 | # fixtures
20 | # --------
21 | @pytest.fixture(scope='session')
22 | def app(sandbox):
23 | # start application
24 | proc = subprocess.Popen(
25 | 'FLASK_ENV=development FLASK_APP=tests.resources.factory '
26 | 'flask run --host=127.0.0.1 --port=5000',
27 | stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True
28 | )
29 |
30 | # wait for server to start
31 | with timeout(10) as to:
32 | while True:
33 | try:
34 | response = requests.get('http://127.0.0.1:5000')
35 | assert response.json() == {'status': 'ok'}
36 | break
37 | except requests.exceptions.ConnectionError:
38 | pass
39 | assert not to.expired, 'Flask application did not start - check for port collisions.'
40 |
41 | yield
42 |
43 | # stop cluster process (can't just use proc.terminate()
44 | # because of the forking process)
45 | os.killpg(os.getpgid(proc.pid), signal.SIGINT)
46 | return
47 |
48 |
49 | # tests
50 | # -----
51 | def test_pattern(app):
52 | # ping
53 | response = requests.get('http://127.0.0.1:5000/ping')
54 | assert response.json() == {'msg': 'pong'}
55 |
56 | # task
57 | response = requests.get('http://127.0.0.1:5000/task')
58 | assert response.json() == {'success': True}
59 |
60 | # wait for flower availability
61 | with timeout(10) as to:
62 | while True:
63 | try:
64 | response = requests.get('http://127.0.0.1:5555/api/workers')
65 | assert True
66 | break
67 | except requests.exceptions.ConnectionError:
68 | pass
69 | assert not to.expired, 'Could not access flower.'
70 | return
71 |
--------------------------------------------------------------------------------
/tests/requirements.txt:
--------------------------------------------------------------------------------
1 | codecov
2 | pytest>=4.6
3 | pytest-cov
4 | pytest-runner
5 | parameterized
6 | Flask-SQLAlchemy
7 | redis
8 |
--------------------------------------------------------------------------------
/tests/resources/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bprinty/Flask-Execute/9406914a7996e73034db94f15b79f70e246d7084/tests/resources/__init__.py
--------------------------------------------------------------------------------
/tests/resources/direct.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Application config testing setup for directly instantiating a
4 | # flask application and associated plugins. To run the tests
5 | # associated with this file, execute:
6 | #
7 | # .. code-block:: bash
8 | #
9 | #    ~$ pytest tests/isolated/test_direct.py
10 | # ------------------------------------------------------------
11 |
12 | from flask import Flask, jsonify
13 | from flask_execute import Celery
14 | from celery.schedules import crontab
15 | from .. import SANDBOX
16 |
17 | app = Flask(__name__)
18 | app.config['CELERY_LOG_DIR'] = SANDBOX
19 | app.config['CELERYD_CONCURRENCY'] = 4
20 | celery = Celery(app)
21 |
22 | def ping():
23 | return 'pong'
24 |
25 | @celery.schedule(
26 | crontab(
27 | hour=0,
28 | minute=0
29 | ),
30 | args=(True,),
31 | kwargs={}
32 | )
33 | def beat(input):
34 | return input
35 |
36 | @celery.task
37 | def noop():
38 | return True
39 |
40 | @celery.task(name='test')
41 | def nope():
42 | return True
43 |
44 | @app.route('/')
45 | def index():
46 | return jsonify(status='ok')
47 |
48 | @app.route('/ping')
49 | def ping_handler():
50 | future = celery.submit(ping)
51 | result = future.result(timeout=1)
52 | return jsonify(msg=result)
53 |
54 | @app.route('/task')
55 | def task_handler():
56 | task1 = celery.task.noop.delay()
57 | task1.wait()
58 | task2 = celery.task['test'].delay()
59 | task2.wait()
60 | return jsonify(success=task1.result & task2.result)
61 |
62 | if __name__ == '__main__':
63 | app.run()
64 |
--------------------------------------------------------------------------------
/tests/resources/factory.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Application config testing factory method for instantiating the
4 | # flask application and associated plugins. To run the tests
5 | # associated with this file, execute:
6 | #
7 | # .. code-block:: bash
8 | #
9 | # ~$ pytest tests/isolated/test_factory.py
10 | # --------------------------------------------------------
11 |
12 | from flask import Flask, Blueprint, jsonify
13 | from flask_execute import Celery
14 | from celery.schedules import crontab
15 | from .. import SANDBOX
16 |
17 | celery = Celery()
18 | url = Blueprint('url', __name__)
19 |
20 | def create_app():
21 | app = Flask(__name__)
22 | app.config['CELERY_LOG_DIR'] = SANDBOX
23 | app.config['CELERYD_CONCURRENCY'] = 4
24 | app.register_blueprint(url)
25 | celery.init_app(app)
26 | return app
27 |
28 | def ping():
29 | return 'pong'
30 |
31 | @celery.schedule(
32 | crontab(
33 | hour=0,
34 | minute=0
35 | ),
36 | args=(True,),
37 | kwargs={}
38 | )
39 | def beat(input):
40 | return input
41 |
42 | @celery.task
43 | def noop():
44 | return True
45 |
46 | @celery.task(name='test')
47 | def nope():
48 | return True
49 |
50 | @url.route('/')
51 | def index():
52 | return jsonify(status='ok')
53 |
54 | @url.route('/ping')
55 | def ping_handler():
56 | future = celery.submit(ping)
57 | result = future.result(timeout=1)
58 | return jsonify(msg=result)
59 |
60 | @url.route('/task')
61 | def task_handler():
62 | task1 = celery.task.noop.delay()
63 | task1.wait()
64 | task2 = celery.task['test'].delay()
65 | task2.wait()
66 | return jsonify(success=task1.result & task2.result)
67 |
68 | if __name__ == '__main__':
69 | app = create_app()
70 | app.run()
71 |
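Reduced to essentials, the two resource modules above exercise the two standard Flask initialization patterns; the only difference is when the plugin binds to the application (a sketch, with details as in the files above):

    from flask import Flask
    from flask_execute import Celery

    # direct pattern (direct.py): plugin bound at import time
    app = Flask(__name__)
    celery = Celery(app)

    # factory pattern (factory.py): plugin created unbound and attached
    # later, inside the application factory
    celery = Celery()

    def create_app():
        app = Flask(__name__)
        celery.init_app(app)
        return app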
--------------------------------------------------------------------------------
/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Testing for plugin cli functionality.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import subprocess
11 |
12 | from .fixtures import timeout
13 |
14 |
15 | # session
16 | # -------
17 | class TestCli:
18 |
19 | def test_status(self, client, celery):
20 | # submit tasks via api
21 | response = client.post('/ping')
22 | assert response.status_code == 200
23 | assert response.json['result'] == 'pong'
24 |
25 | # workers should be running
26 | output = subprocess.check_output('flask celery status', stderr=subprocess.STDOUT, shell=True).decode('utf-8')
27 | assert 'online' in output
28 | assert 'OK' in output
29 | return
30 |
31 | def test_worker(self, celery):
32 | worker = 'test_worker'
33 |
34 | # specific worker not running
35 | output = subprocess.check_output('flask celery status', stderr=subprocess.STDOUT, shell=True).decode('utf-8')
36 | assert worker not in output
37 |
38 | # start worker
39 | args = 'flask celery worker -n {}@%h'.format(worker).split(' ')
40 | celery.processes[worker] = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
41 |
42 | # wait for status checking to return
43 | with timeout(5) as to:
44 | while not to.expired:
45 | output = subprocess.check_output('flask celery status', stderr=subprocess.STDOUT, shell=True).decode('utf-8')
46 | if 'online' in output and worker in output:
47 | break
48 |
49 | # assert specific worker is running
50 | assert worker in output
51 | return
52 |
--------------------------------------------------------------------------------
/tests/test_futures.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Testing for Future and FuturePool objects.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import pytest
11 |
12 | from .fixtures import add, sleep, fail
13 | from .fixtures import db, Item
14 |
15 |
16 | # tests
17 | # -----
18 | class TestFuture:
19 |
20 | def test_result(self, celery):
21 | # submit long command
22 | future = celery.submit(add, 1, 2)
23 | assert future.status == 'PENDING'
24 | assert not future.done()
25 | assert future.running()
26 |
27 | # wait for timeout
28 | result = future.result(timeout=1)
29 | assert result == 3
30 | assert not future.running()
31 | assert future.done()
32 | assert not future.running()
33 | assert not future.cancelled()
34 | assert future.status == 'SUCCESS'
35 | return
36 |
37 | def test_exception(self, celery):
38 | # submit failing task and assert result
39 | future = celery.submit(fail)
40 | with pytest.raises(AssertionError):
41 | future.result(timeout=1)
42 |
43 | # assert exception details
44 | exe = future.exception()
45 | assert isinstance(exe, AssertionError)
46 | assert str(exe) == 'fail'
47 | assert future.state == 'FAILURE'
48 | assert not future.running()
49 | return
50 |
51 | def test_cancel(self, celery):
52 | # submit long command
53 | future = celery.submit(sleep, 10)
54 | assert future.status == 'PENDING'
55 | assert future.running()
56 | assert not future.done()
57 |
58 | # cancel task and look at result
59 | future.cancel(wait=True, timeout=1)
60 | assert future.status == 'REVOKED'
61 | assert future.cancelled()
62 | assert not future.running()
63 | assert future.done()
64 | return
65 |
66 | def test_callback(self, celery):
67 | # add callback function
68 |         def callback(task):  # noqa
69 | task = Item(name='callback')
70 | db.session.add(task)
71 | db.session.commit()
72 | return
73 |
74 | # submit task and add callback
75 | future = celery.submit(add, 1, 2)
76 | future.add_done_callback(callback)
77 |
78 | # assert item hasn't been created yet
79 | item = Item.query.filter_by(name='callback').first()
80 | assert item is None
81 | future.result(timeout=1)
82 |
83 | # after task finishes, assert item is created
84 | item = Item.query.filter_by(name='callback').first()
85 | assert item is not None
86 | return
87 |
88 | def test_running(self, celery):
89 |         # implicitly tested by other methods
90 | return
91 |
92 | def test_cancelled(self, celery):
93 | # implicitly tested by other methods
94 | return
95 |
96 | def test_done(self, celery):
97 | # implicitly tested by other methods
98 | return
99 |
100 |
101 | class TestFuturePool:
102 |
103 | def test_result(self, celery):
104 | # submit long command
105 | pool = celery.map(add, [1, 2], [1, 2], [1, 2])
106 | assert pool.status == 'PENDING'
107 | assert not pool.done()
108 | assert pool.running()
109 |
110 | # wait for timeout
111 | result = pool.result(timeout=1)
112 | assert result == [3, 3, 3]
113 | assert not pool.running()
114 | assert pool.done()
115 | assert not pool.running()
116 | assert not pool.cancelled()
117 | assert pool.status == 'SUCCESS'
118 | return
119 |
120 | def test_exception(self, celery):
121 | # submit failing task and assert result
122 | pool = celery.map(fail, [], [])
123 | with pytest.raises(AssertionError):
124 | pool.result(timeout=1)
125 |
126 | # assert exception details
127 | exe = pool.exception()[0]
128 | assert isinstance(exe, AssertionError)
129 | assert str(exe) == 'fail'
130 | assert pool.state == 'FAILURE'
131 | assert not pool.running()
132 | return
133 |
134 | def test_cancel(self, celery):
135 | # submit long command
136 | pool = celery.map(sleep, [3], [3], [3])
137 | assert pool.status == 'PENDING'
138 | assert pool.running()
139 | assert not pool.done()
140 |
141 | # cancel task and look at result
142 | pool.cancel(wait=True, timeout=1)
143 | assert pool.status == 'REVOKED'
144 | assert pool.cancelled()
145 | assert not pool.running()
146 | assert pool.done()
147 | return
148 |
149 | def test_callback(self, celery):
150 | # add callback function
151 |         def callback(task):  # noqa
152 | task = Item(name='pool-callback')
153 | db.session.add(task)
154 | db.session.commit()
155 | return
156 |
157 | # submit task and add callback
158 | pool = celery.map(add, [1, 2], [1, 2])
159 | pool.add_done_callback(callback)
160 |
161 | # assert item hasn't been created yet
162 | item = Item.query.filter_by(name='pool-callback').first()
163 | assert item is None
164 | pool.result(timeout=1)
165 |
166 | # after task finishes, assert item is created
167 | item = Item.query.filter_by(name='pool-callback').first()
168 | assert item is not None
169 | return
170 |
171 | def test_running(self, celery):
172 |         # implicitly tested by other methods
173 | return
174 |
175 | def test_cancelled(self, celery):
176 | # implicitly tested by other methods
177 | return
178 |
179 | def test_done(self, celery):
180 | # implicitly tested by other methods
181 | return
182 |
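The assertions above outline the Future and FuturePool surface: it mirrors the concurrent.futures.Future interface (result/done/running/cancelled/exception/add_done_callback/cancel), with Celery's state strings exposed via .status/.state. Distilled usage, using the add fixture task from above:

    # single task
    future = celery.submit(add, 1, 2)   # Future, initially 'PENDING'
    assert future.result(timeout=1) == 3
    assert future.done() and future.status == 'SUCCESS'

    # fan-out over argument tuples
    pool = celery.map(add, [1, 2], [3, 4])   # FuturePool of two Futures
    assert pool.result(timeout=1) == [3, 7]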
--------------------------------------------------------------------------------
/tests/test_managers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Testing for task and command managers.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | from .fixtures import add, sleep
11 |
12 |
13 | # tests
14 | # -----
15 | class TestTaskManagers:
16 |
17 | def test_task(self, celery):
18 | # check if task is registered
19 | data = celery.inspect.registered()
20 | worker = list(data.keys())[0]
21 | assert 'tests.fixtures.registered' in data[worker]
22 |
23 | # run registered task
24 | assert celery.task.registered()
25 |
26 | # run registered task with celery api
27 | task = celery.task.registered.delay()
28 | task.wait()
29 | assert task.result
30 | return
31 |
32 | def test_schedule(self, celery):
33 | # assert configuration
34 | assert 'scheduled-task' in celery.controller.conf['CELERYBEAT_SCHEDULE']
35 | schedule = celery.controller.conf['CELERYBEAT_SCHEDULE']['scheduled-task']
36 | assert schedule['task'] == 'tests.fixtures.scheduled'
37 | assert 'crontab' in str(type(schedule['schedule']))
38 |
39 | # run scheduled task
40 | assert celery.schedule.scheduled()
41 |
42 | # run registered task with celery api
43 | task = celery.schedule.scheduled.delay()
44 | task.wait()
45 | assert task.result
46 | return
47 |
48 |
49 | class TestCommandManagers:
50 |
51 | def test_inspect(self, celery):
52 | celery.submit(sleep).cancel(wait=True)
53 | celery.map(add, [1, 1], [1, 1], [1, 1])
54 |
55 | # revoked
56 | workers = celery.inspect.revoked()
57 | revoked = workers[list(workers.keys())[0]]
58 | assert len(revoked) > 0
59 | future = celery.get(revoked[0])
60 | assert revoked[0] == future.id
61 |
62 | # stats
63 | stats = celery.inspect.stats()
64 | assert len(stats) > 0
65 | key = list(stats.keys())[0]
66 | stat = stats[key]
67 | assert 'broker' in stat
68 | return
69 |
70 | def test_control(self, celery):
71 | workers = celery.control.heartbeat()
72 | beat = workers[list(workers.keys())[0]]
73 | assert beat is None
74 | return
75 |
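A note on shape: the inspect and control calls above return mappings keyed by worker name ('worker@host'), which is why each test unwraps the first key. A minimal sketch of that idiom, assuming a single running worker:

    stats = celery.inspect.stats()   # e.g. {'celery@host': {'broker': {...}, ...}}
    worker = next(iter(stats))       # equivalent to list(stats.keys())[0]
    assert 'broker' in stats[worker]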
--------------------------------------------------------------------------------
/tests/test_plugin.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Testing for core plugin functionality.
4 | #
5 | # ------------------------------------------------
6 |
7 |
8 | # imports
9 | # -------
10 | import os
11 |
12 | from flask_execute.plugin import Future, FuturePool
13 |
14 | from .fixtures import add, task_id, timeout
15 |
16 |
17 | # tests
18 | # -----
19 | class TestPlugin:
20 |
21 | def test_submit(self, celery):
22 | future = celery.submit(add, 1, 2)
23 | result = future.result(timeout=1)
24 | assert isinstance(future, Future)
25 | assert result == 3
26 | return
27 |
28 | def test_map(self, celery):
29 | pool = celery.map(add, [1, 2], [3, 4], [5, 6])
30 | assert len(pool) == 3
31 | results = pool.result(timeout=1)
32 | assert isinstance(pool, FuturePool)
33 | assert results == [3, 7, 11]
34 | return
35 |
36 | def test_current_task(self, celery):
37 | # check current task metadata from proxy
38 | future = celery.submit(task_id)
39 | ident = future.result()
40 | assert ident is not None
41 |
42 | # get the result and check the status
43 | future = celery.get(ident)
44 | assert not future.running()
45 | assert future.done()
46 | return
47 |
48 | def test_status(self, celery):
49 | status = celery.status()
50 | assert len(status)
51 | worker = list(status.keys())[0]
52 | assert status[worker] == 'OK'
53 | return
54 |
55 | def test_get(self, celery):
56 | future = celery.submit(add, 1, 2)
57 | future = celery.get(future.id)
58 | result = future.result(timeout=1)
59 | assert isinstance(future, Future)
60 | assert result == 3
61 | return
62 |
63 | def test_logs(self, celery):
64 | check = celery.logs.copy()
65 | with timeout(5) as to:
66 | while not to.expired and len(check):
67 | logfile = check[0]
68 |                 if os.path.exists(logfile) and os.stat(logfile).st_size != 0:
69 |                     del check[0]
70 |         assert not check, 'expected all worker log files to exist and be non-empty'
71 |         return
72 |
73 |
74 | class TestIntegration:
75 |
76 | def test_api(self, client, celery):
77 | # submit tasks via api
78 | response = client.post('/submit')
79 | assert response.status_code == 200
80 | assert len(response.json) == 5
81 | tasks = response.json
82 |
83 | # monitor results
84 | sleep = tasks[3]
85 | response = client.get('/monitor/{}'.format(sleep))
86 | assert response.status_code == 200
87 | assert response.json == {'status': 'PENDING'}
88 |
89 | # wait for something to finish and check statuses of other tasks
90 | celery.get(sleep).result()
91 | success = 0
92 | for task in tasks:
93 | response = client.get('/monitor/{}'.format(task))
94 | assert response.status_code == 200
95 | if 'SUCCESS' in response.json['status']:
96 | success += 1
97 | assert success > 0
98 | return
99 |
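TestIntegration relies on two routes served by the client fixture's application (defined outside this file): POST /submit, returning five task ids, and GET /monitor/<id>, returning the task status. A hypothetical sketch of the monitor route consistent with the assertions above (names here are assumptions, not the fixture's actual code):

    # hypothetical; 'app' and 'celery' as wired up in the test fixtures
    @app.route('/monitor/<ident>')
    def monitor(ident):
        future = celery.get(ident)           # look up Future by task id
        return jsonify(status=future.state)  # e.g. {'status': 'PENDING'}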
--------------------------------------------------------------------------------