├── .github └── workflows │ └── tests.yaml ├── .gitignore ├── .readthedocs.yaml ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── README.rst ├── TODO ├── docs ├── Makefile ├── _static │ ├── huey.jpg │ └── logo.jpg ├── _themes │ └── flask │ │ ├── layout.html │ │ ├── relations.html │ │ ├── static │ │ ├── flasky.css_t │ │ └── small_flask.css │ │ └── theme.conf ├── api.rst ├── asyncio.rst ├── changes.rst ├── conf.py ├── consumer.rst ├── contrib.rst ├── django.rst ├── guide.rst ├── huey.jpg ├── imports.rst ├── index.rst ├── installation.rst ├── make.bat ├── mini.rst ├── shared_resources.rst ├── signals.rst └── troubleshooting.rst ├── examples ├── django_ex │ ├── README │ ├── djangoex │ │ ├── __init__.py │ │ ├── settings.py │ │ ├── test_app │ │ │ ├── __init__.py │ │ │ ├── models.py │ │ │ └── tasks.py │ │ └── urls.py │ └── manage.py ├── flask_ex │ ├── README.md │ ├── __init__.py │ ├── app.py │ ├── main.py │ ├── run_huey.sh │ ├── run_webapp.sh │ ├── tasks.py │ ├── templates │ │ └── home.html │ └── views.py ├── mini │ └── mini.py └── simple │ ├── README │ ├── __init__.py │ ├── amain.py │ ├── config.py │ ├── cons.sh │ ├── main.py │ └── tasks.py ├── huey ├── __init__.py ├── api.py ├── bin │ ├── __init__.py │ └── huey_consumer.py ├── constants.py ├── consumer.py ├── consumer_options.py ├── contrib │ ├── __init__.py │ ├── asyncio.py │ ├── djhuey │ │ ├── __init__.py │ │ ├── management │ │ │ ├── __init__.py │ │ │ └── commands │ │ │ │ ├── __init__.py │ │ │ │ └── run_huey.py │ │ └── models.py │ ├── helpers.py │ ├── kyototycoon.py │ ├── mini.py │ └── sql_huey.py ├── exceptions.py ├── registry.py ├── serializer.py ├── signals.py ├── storage.py ├── tests │ ├── __init__.py │ ├── __main__.py │ ├── base.py │ ├── scripts │ │ └── kt.lua │ ├── test_api.py │ ├── test_consumer.py │ ├── test_crontab.py │ ├── test_helpers.py │ ├── test_immediate.py │ ├── test_kt_huey.py │ ├── test_priority.py │ ├── test_registry.py │ ├── test_serializer.py │ ├── test_signals.py │ ├── test_sql_huey.py │ ├── test_storage.py │ ├── test_utils.py │ └── test_wrappers.py └── utils.py ├── pyproject.toml ├── runtests.py └── setup.py /.github/workflows/tests.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | on: [push] 3 | jobs: 4 | tests: 5 | name: ${{ matrix.python-version }} 6 | runs-on: ubuntu-20.04 7 | services: 8 | redis: 9 | image: redis 10 | ports: 11 | - 6379:6379 12 | kyototycoon: 13 | image: coleifer/kyototycoon 14 | ports: 15 | - 1978:1978 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | python-version: [3.8, "3.10", "3.11", "3.13"] 20 | steps: 21 | - uses: actions/checkout@v2 22 | - uses: actions/setup-python@v2 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | - name: pip deps 26 | run: pip install setuptools gevent redis peewee ukt 27 | - name: runtests 28 | env: 29 | HUEY_TRAVIS: '1' 30 | run: python runtests.py -v 2 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .idea 3 | *.pyc 4 | docs/_build 5 | venv/ 6 | huey.egg-info/ 7 | huey.db 8 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | build: 3 | os: ubuntu-22.04 4 | tools: 5 | python: "3.11" 6 | sphinx: 7 | configuration: docs/conf.py 8 | 
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017 Charles Leifer 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CHANGELOG.md 2 | include LICENSE 3 | include MANIFEST.in 4 | include README.rst 5 | include pyproject.toml 6 | recursive-include docs * 7 | recursive-include examples * 8 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: http://media.charlesleifer.com/blog/photos/huey2-logo.png 2 | 3 | *a lightweight alternative*. 4 | 5 | huey is: 6 | 7 | * a task queue 8 | * written in python 9 | * clean and simple API 10 | * redis, sqlite, file-system, or in-memory storage 11 | * `example code `_. 12 | * `read the documentation `_. 13 | 14 | huey supports: 15 | 16 | * multi-process, multi-thread or greenlet task execution models 17 | * schedule tasks to execute at a given time, or after a given delay 18 | * schedule recurring tasks, like a crontab 19 | * automatically retry tasks that fail 20 | * task prioritization 21 | * task result storage 22 | * task expiration 23 | * task locking 24 | * task pipelines and chains 25 | 26 | .. image:: http://i.imgur.com/2EpRs.jpg 27 | 28 | At a glance 29 | ----------- 30 | 31 | .. code-block:: python 32 | 33 | from huey import RedisHuey, crontab 34 | 35 | huey = RedisHuey('my-app', host='redis.myapp.com') 36 | 37 | @huey.task() 38 | def add_numbers(a, b): 39 | return a + b 40 | 41 | @huey.task(retries=2, retry_delay=60) 42 | def flaky_task(url): 43 | # This task might fail, in which case it will be retried up to 2 times 44 | # with a delay of 60s between retries. 45 | return this_might_fail(url) 46 | 47 | @huey.periodic_task(crontab(minute='0', hour='3')) 48 | def nightly_backup(): 49 | sync_all_data() 50 | 51 | Calling a ``task``-decorated function will enqueue the function call for 52 | execution by the consumer. A special result handle is returned immediately, 53 | which can be used to fetch the result once the task is finished: 54 | 55 | .. 
code-block:: pycon 56 | 57 | >>> from demo import add_numbers 58 | >>> res = add_numbers(1, 2) 59 | >>> res 60 | 61 | 62 | >>> res() 63 | 3 64 | 65 | Tasks can be scheduled to run in the future: 66 | 67 | .. code-block:: pycon 68 | 69 | >>> res = add_numbers.schedule((2, 3), delay=10) # Will be run in ~10s. 70 | >>> res(blocking=True) # Will block until task finishes, in ~10s. 71 | 5 72 | 73 | For much more, check out the `guide `_ 74 | or take a look at the `example code `_. 75 | 76 | Running the consumer 77 | ^^^^^^^^^^^^^^^^^^^^ 78 | 79 | Run the consumer with four worker processes: 80 | 81 | .. code-block:: console 82 | 83 | $ huey_consumer.py my_app.huey -k process -w 4 84 | 85 | To run the consumer with a single worker thread (default): 86 | 87 | .. code-block:: console 88 | 89 | $ huey_consumer.py my_app.huey 90 | 91 | If your work-loads are mostly IO-bound, you can run the consumer with threads 92 | or greenlets instead. Because greenlets are so lightweight, you can run quite a 93 | few of them efficiently: 94 | 95 | .. code-block:: console 96 | 97 | $ huey_consumer.py my_app.huey -k greenlet -w 32 98 | 99 | Storage 100 | ------- 101 | 102 | Huey's design and feature-set were informed by the capabilities of the 103 | `Redis `_ database. Redis is a fantastic fit for a 104 | lightweight task queueing library like Huey: it's self-contained, versatile, 105 | and can be a multi-purpose solution for other web-application tasks like 106 | caching, event publishing, analytics, rate-limiting, and more. 107 | 108 | Although Huey was designed with Redis in mind, the storage system implements a 109 | simple API and many other tools could be used instead of Redis if that's your 110 | preference. 111 | 112 | Huey comes with builtin support for Redis, Sqlite and in-memory storage. 113 | 114 | Documentation 115 | ---------------- 116 | 117 | `See Huey documentation `_. 118 | 119 | Project page 120 | --------------- 121 | 122 | `See source code and issue tracker on Github `_. 123 | 124 | Huey is named in honor of my cat: 125 | 126 | .. image:: http://m.charlesleifer.com/t/800x-/blog/photos/p1473037658.76.jpg?key=mD9_qMaKBAuGPi95KzXYqg 127 | 128 | -------------------------------------------------------------------------------- /TODO: -------------------------------------------------------------------------------- 1 | * Postgres / MySQL support 2 | * At least once delivery? 3 | * Redis streams as viable broker implementation? 4 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make ' where is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -rm -rf $(BUILDDIR)/* 43 | 44 | html: 45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 48 | 49 | dirhtml: 50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 51 | @echo 52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 53 | 54 | singlehtml: 55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 56 | @echo 57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 58 | 59 | pickle: 60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 61 | @echo 62 | @echo "Build finished; now you can process the pickle files." 63 | 64 | json: 65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 66 | @echo 67 | @echo "Build finished; now you can process the JSON files." 68 | 69 | htmlhelp: 70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 71 | @echo 72 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 73 | ".hhp project file in $(BUILDDIR)/htmlhelp." 74 | 75 | qthelp: 76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 77 | @echo 78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/huey.qhcp" 81 | @echo "To view the help file:" 82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/huey.qhc" 83 | 84 | devhelp: 85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 86 | @echo 87 | @echo "Build finished." 88 | @echo "To view the help file:" 89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/huey" 90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/huey" 91 | @echo "# devhelp" 92 | 93 | epub: 94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 95 | @echo 96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 97 | 98 | latex: 99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 100 | @echo 101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 
102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 103 | "(use \`make latexpdf' here to do that automatically)." 104 | 105 | latexpdf: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo "Running LaTeX files through pdflatex..." 108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 110 | 111 | text: 112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 113 | @echo 114 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 115 | 116 | man: 117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 118 | @echo 119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 120 | 121 | texinfo: 122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 123 | @echo 124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 125 | @echo "Run \`make' in that directory to run these through makeinfo" \ 126 | "(use \`make info' here to do that automatically)." 127 | 128 | info: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo "Running Texinfo files through makeinfo..." 131 | make -C $(BUILDDIR)/texinfo info 132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 133 | 134 | gettext: 135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 136 | @echo 137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 138 | 139 | changes: 140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 141 | @echo 142 | @echo "The overview file is in $(BUILDDIR)/changes." 143 | 144 | linkcheck: 145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 146 | @echo 147 | @echo "Link check complete; look for any errors in the above output " \ 148 | "or in $(BUILDDIR)/linkcheck/output.txt." 149 | 150 | doctest: 151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 152 | @echo "Testing of doctests in the sources finished, look at the " \ 153 | "results in $(BUILDDIR)/doctest/output.txt." 154 | -------------------------------------------------------------------------------- /docs/_static/huey.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/docs/_static/huey.jpg -------------------------------------------------------------------------------- /docs/_static/logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/docs/_static/logo.jpg -------------------------------------------------------------------------------- /docs/_themes/flask/layout.html: -------------------------------------------------------------------------------- 1 | {%- extends "basic/layout.html" %} 2 | {%- block extrahead %} 3 | {{ super() }} 4 | {% if theme_touch_icon %} 5 | 6 | {% endif %} 7 | 9 | {% endblock %} 10 | {%- block relbar2 %}{% endblock %} 11 | {% block header %} 12 | {{ super() }} 13 | {% if pagename == 'index' %} 14 |
15 | {% endif %} 16 | {% endblock %} 17 | {%- block footer %} 18 | 22 | {% if pagename == 'index' %} 23 |
24 | {% endif %} 25 | {%- endblock %} 26 | -------------------------------------------------------------------------------- /docs/_themes/flask/relations.html: -------------------------------------------------------------------------------- 1 |

Related Topics

2 | 20 | -------------------------------------------------------------------------------- /docs/_themes/flask/static/flasky.css_t: -------------------------------------------------------------------------------- 1 | /* 2 | * flasky.css_t 3 | * ~~~~~~~~~~~~ 4 | * 5 | * :copyright: Copyright 2010 by Armin Ronacher. 6 | * :license: Flask Design License, see LICENSE for details. 7 | */ 8 | 9 | {% set page_width = '940px' %} 10 | {% set sidebar_width = '220px' %} 11 | 12 | @import url("basic.css"); 13 | 14 | /* -- page layout ----------------------------------------------------------- */ 15 | 16 | body { 17 | font-family: 'Georgia', serif; 18 | font-size: 17px; 19 | background-color: white; 20 | color: #000; 21 | margin: 0; 22 | padding: 0; 23 | } 24 | 25 | div.document { 26 | width: {{ page_width }}; 27 | margin: 30px auto 0 auto; 28 | } 29 | 30 | div.documentwrapper { 31 | float: left; 32 | width: 100%; 33 | } 34 | 35 | div.bodywrapper { 36 | margin: 0 0 0 {{ sidebar_width }}; 37 | } 38 | 39 | div.sphinxsidebar { 40 | width: {{ sidebar_width }}; 41 | } 42 | 43 | hr { 44 | border: 1px solid #B1B4B6; 45 | } 46 | 47 | div.body { 48 | background-color: #ffffff; 49 | color: #3E4349; 50 | padding: 0 30px 0 30px; 51 | } 52 | 53 | img.floatingflask { 54 | padding: 0 0 10px 10px; 55 | float: right; 56 | } 57 | 58 | div.footer { 59 | width: {{ page_width }}; 60 | margin: 20px auto 30px auto; 61 | font-size: 14px; 62 | color: #888; 63 | text-align: right; 64 | } 65 | 66 | div.footer a { 67 | color: #888; 68 | } 69 | 70 | div.related { 71 | display: none; 72 | } 73 | 74 | div.sphinxsidebar a { 75 | color: #444; 76 | text-decoration: none; 77 | border-bottom: 1px dotted #999; 78 | } 79 | 80 | div.sphinxsidebar a:hover { 81 | border-bottom: 1px solid #999; 82 | } 83 | 84 | div.sphinxsidebar { 85 | font-size: 14px; 86 | line-height: 1.5; 87 | } 88 | 89 | div.sphinxsidebarwrapper { 90 | padding: 18px 10px; 91 | } 92 | 93 | div.sphinxsidebarwrapper p.logo { 94 | padding: 0 0 20px 0; 95 | margin: 0; 96 | text-align: center; 97 | } 98 | 99 | div.sphinxsidebar h3, 100 | div.sphinxsidebar h4 { 101 | font-family: 'Garamond', 'Georgia', serif; 102 | color: #444; 103 | font-size: 24px; 104 | font-weight: normal; 105 | margin: 0 0 5px 0; 106 | padding: 0; 107 | } 108 | 109 | div.sphinxsidebar h4 { 110 | font-size: 20px; 111 | } 112 | 113 | div.sphinxsidebar h3 a { 114 | color: #444; 115 | } 116 | 117 | div.sphinxsidebar p.logo a, 118 | div.sphinxsidebar h3 a, 119 | div.sphinxsidebar p.logo a:hover, 120 | div.sphinxsidebar h3 a:hover { 121 | border: none; 122 | } 123 | 124 | div.sphinxsidebar p { 125 | color: #555; 126 | margin: 10px 0; 127 | } 128 | 129 | div.sphinxsidebar ul { 130 | margin: 10px 0; 131 | padding: 0; 132 | color: #000; 133 | } 134 | 135 | div.sphinxsidebar input { 136 | border: 1px solid #ccc; 137 | font-family: 'Georgia', serif; 138 | font-size: 1em; 139 | } 140 | 141 | /* -- body styles ----------------------------------------------------------- */ 142 | 143 | a { 144 | color: #004B6B; 145 | text-decoration: underline; 146 | } 147 | 148 | a:hover { 149 | color: #6D4100; 150 | text-decoration: underline; 151 | } 152 | 153 | div.body h1, 154 | div.body h2, 155 | div.body h3, 156 | div.body h4, 157 | div.body h5, 158 | div.body h6 { 159 | font-family: 'Garamond', 'Georgia', serif; 160 | font-weight: normal; 161 | margin: 30px 0px 10px 0px; 162 | padding: 0; 163 | } 164 | 165 | {% if theme_index_logo %} 166 | div.indexwrapper h1 { 167 | text-indent: -999999px; 168 | background: url({{ theme_index_logo 
}}) no-repeat center center; 169 | height: {{ theme_index_logo_height }}; 170 | } 171 | {% endif %} 172 | 173 | div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } 174 | div.body h2 { font-size: 180%; } 175 | div.body h3 { font-size: 150%; } 176 | div.body h4 { font-size: 130%; } 177 | div.body h5 { font-size: 100%; } 178 | div.body h6 { font-size: 100%; } 179 | 180 | a.headerlink { 181 | color: #ddd; 182 | padding: 0 4px; 183 | text-decoration: none; 184 | } 185 | 186 | a.headerlink:hover { 187 | color: #444; 188 | background: #eaeaea; 189 | } 190 | 191 | div.body p, div.body dd, div.body li { 192 | line-height: 1.4em; 193 | } 194 | 195 | div.admonition { 196 | background: #fafafa; 197 | margin: 20px -30px; 198 | padding: 10px 30px; 199 | border-top: 1px solid #ccc; 200 | border-bottom: 1px solid #ccc; 201 | } 202 | 203 | div.admonition tt.xref, div.admonition a tt { 204 | border-bottom: 1px solid #fafafa; 205 | } 206 | 207 | dd div.admonition { 208 | margin-left: -60px; 209 | padding-left: 60px; 210 | } 211 | 212 | div.admonition p.admonition-title { 213 | font-family: 'Garamond', 'Georgia', serif; 214 | font-weight: normal; 215 | font-size: 24px; 216 | margin: 0 0 10px 0; 217 | padding: 0; 218 | line-height: 1; 219 | } 220 | 221 | div.admonition p.last { 222 | margin-bottom: 0; 223 | } 224 | 225 | div.highlight { 226 | background-color: white; 227 | } 228 | 229 | dt:target, .highlight { 230 | background: #FAF3E8; 231 | } 232 | 233 | div.note { 234 | background-color: #eee; 235 | border: 1px solid #ccc; 236 | } 237 | 238 | div.seealso { 239 | background-color: #ffc; 240 | border: 1px solid #ff6; 241 | } 242 | 243 | div.topic { 244 | background-color: #eee; 245 | } 246 | 247 | p.admonition-title { 248 | display: inline; 249 | } 250 | 251 | p.admonition-title:after { 252 | content: ":"; 253 | } 254 | 255 | pre, tt { 256 | font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; 257 | font-size: 0.9em; 258 | } 259 | 260 | img.screenshot { 261 | } 262 | 263 | tt.descname, tt.descclassname { 264 | font-size: 0.95em; 265 | } 266 | 267 | tt.descname { 268 | padding-right: 0.08em; 269 | } 270 | 271 | img.screenshot { 272 | -moz-box-shadow: 2px 2px 4px #eee; 273 | -webkit-box-shadow: 2px 2px 4px #eee; 274 | box-shadow: 2px 2px 4px #eee; 275 | } 276 | 277 | table.docutils { 278 | border: 1px solid #888; 279 | -moz-box-shadow: 2px 2px 4px #eee; 280 | -webkit-box-shadow: 2px 2px 4px #eee; 281 | box-shadow: 2px 2px 4px #eee; 282 | } 283 | 284 | table.docutils td, table.docutils th { 285 | border: 1px solid #888; 286 | padding: 0.25em 0.7em; 287 | } 288 | 289 | table.field-list, table.footnote { 290 | border: none; 291 | -moz-box-shadow: none; 292 | -webkit-box-shadow: none; 293 | box-shadow: none; 294 | } 295 | 296 | table.footnote { 297 | margin: 15px 0; 298 | width: 100%; 299 | border: 1px solid #eee; 300 | background: #fdfdfd; 301 | font-size: 0.9em; 302 | } 303 | 304 | table.footnote + table.footnote { 305 | margin-top: -15px; 306 | border-top: none; 307 | } 308 | 309 | table.field-list th { 310 | padding: 0 0.8em 0 0; 311 | } 312 | 313 | table.field-list td { 314 | padding: 0; 315 | } 316 | 317 | table.footnote td.label { 318 | width: 0px; 319 | padding: 0.3em 0 0.3em 0.5em; 320 | } 321 | 322 | table.footnote td { 323 | padding: 0.3em 0.5em; 324 | } 325 | 326 | dl { 327 | margin: 0; 328 | padding: 0; 329 | } 330 | 331 | dl dd { 332 | margin-left: 30px; 333 | } 334 | 335 | blockquote { 336 | margin: 0 0 0 30px; 337 | padding: 0; 338 | } 339 | 340 | 
ul, ol { 341 | margin: 10px 0 10px 30px; 342 | padding: 0; 343 | } 344 | 345 | pre { 346 | background: #eee; 347 | padding: 7px 30px; 348 | margin: 15px -30px; 349 | line-height: 1.3em; 350 | } 351 | 352 | dl pre, blockquote pre, li pre { 353 | margin-left: -60px; 354 | padding-left: 60px; 355 | } 356 | 357 | dl dl pre { 358 | margin-left: -90px; 359 | padding-left: 90px; 360 | } 361 | 362 | tt { 363 | background-color: #ecf0f3; 364 | color: #222; 365 | /* padding: 1px 2px; */ 366 | } 367 | 368 | tt.xref, a tt { 369 | background-color: #FBFBFB; 370 | border-bottom: 1px solid white; 371 | } 372 | 373 | a.reference { 374 | text-decoration: none; 375 | border-bottom: 1px dotted #004B6B; 376 | } 377 | 378 | a.reference:hover { 379 | border-bottom: 1px solid #6D4100; 380 | } 381 | 382 | a.footnote-reference { 383 | text-decoration: none; 384 | font-size: 0.7em; 385 | vertical-align: top; 386 | border-bottom: 1px dotted #004B6B; 387 | } 388 | 389 | a.footnote-reference:hover { 390 | border-bottom: 1px solid #6D4100; 391 | } 392 | 393 | a:hover tt { 394 | background: #EEE; 395 | } 396 | -------------------------------------------------------------------------------- /docs/_themes/flask/static/small_flask.css: -------------------------------------------------------------------------------- 1 | /* 2 | * small_flask.css_t 3 | * ~~~~~~~~~~~~~~~~~ 4 | * 5 | * :copyright: Copyright 2010 by Armin Ronacher. 6 | * :license: Flask Design License, see LICENSE for details. 7 | */ 8 | 9 | body { 10 | margin: 0; 11 | padding: 20px 30px; 12 | } 13 | 14 | div.documentwrapper { 15 | float: none; 16 | background: white; 17 | } 18 | 19 | div.sphinxsidebar { 20 | display: block; 21 | float: none; 22 | width: 102.5%; 23 | margin: 50px -30px -20px -30px; 24 | padding: 10px 20px; 25 | background: #333; 26 | color: white; 27 | } 28 | 29 | div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, 30 | div.sphinxsidebar h3 a { 31 | color: white; 32 | } 33 | 34 | div.sphinxsidebar a { 35 | color: #aaa; 36 | } 37 | 38 | div.sphinxsidebar p.logo { 39 | display: none; 40 | } 41 | 42 | div.document { 43 | width: 100%; 44 | margin: 0; 45 | } 46 | 47 | div.related { 48 | display: block; 49 | margin: 0; 50 | padding: 10px 0 20px 0; 51 | } 52 | 53 | div.related ul, 54 | div.related ul li { 55 | margin: 0; 56 | padding: 0; 57 | } 58 | 59 | div.footer { 60 | display: none; 61 | } 62 | 63 | div.bodywrapper { 64 | margin: 0; 65 | } 66 | 67 | div.body { 68 | min-height: 0; 69 | padding: 0; 70 | } 71 | -------------------------------------------------------------------------------- /docs/_themes/flask/theme.conf: -------------------------------------------------------------------------------- 1 | [theme] 2 | inherit = basic 3 | stylesheet = flasky.css 4 | pygments_style = flask_theme_support.FlaskyStyle 5 | 6 | [options] 7 | index_logo = '' 8 | index_logo_height = 120px 9 | touch_icon = 10 | -------------------------------------------------------------------------------- /docs/asyncio.rst: -------------------------------------------------------------------------------- 1 | .. _asyncio: 2 | 3 | AsyncIO 4 | ------- 5 | 6 | While Huey does not provide first-class support for a full asyncio pipeline, in 7 | practice one of the most useful locations to be "async"-friendly is when 8 | blocking while waiting for a task result to be ready. When waiting for a task 9 | result, Huey must poll the storage backend to determine if the result is ready 10 | which means lots of opportunity for an asynchronous solution. 
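Under the hood, an async-friendly wait amounts to polling the (non-blocking) result handle and yielding to the event loop between checks. The sketch below is illustrative only (it is not how the helpers documented next are implemented, and a real version would also need to distinguish "no result yet" from a task that legitimately returned ``None``):

.. code-block:: python

    import asyncio

    async def naive_aget_result(result, delay=0.1, backoff=1.15, max_delay=1.0):
        # Calling the result handle is non-blocking; it returns None while
        # the task is still pending.
        while True:
            value = result()
            if value is not None:
                return value
            await asyncio.sleep(delay)  # Yield so other coroutines can run.
            delay = min(delay * backoff, max_delay)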
11 | 12 | In order to simplify this, Huey provides two helpers for ``await``-ing task 13 | results: 14 | 15 | .. py:function:: aget_result(result, backoff=1.15, max_delay=1.0, preserve=False) 16 | 17 | :param Result result: a result handle returned when calling a task. 18 | :return: task return value. 19 | 20 | AsyncIO helper for awaiting the result of a task execution. 21 | 22 | Example: 23 | 24 | .. code-block:: python 25 | 26 | @huey.task() 27 | def sleep(n): 28 | time.sleep(n) 29 | return n 30 | 31 | async def main(): 32 | # Single task, will finish in ~2 seconds (other coroutines can run 33 | # during this time!). 34 | rh = sleep(2) 35 | result = await aget_result(rh) 36 | 37 | # Awaiting multiple results. This will also finish in ~2 seconds. 38 | r1 = sleep(2) 39 | r2 = sleep(2) 40 | r3 = sleep(2) 41 | results = await asyncio.gather( 42 | aget_result(r1), 43 | aget_result(r2), 44 | aget_result(r3)) 45 | 46 | 47 | .. py:function:: aget_result_group(rg, *args, **kwargs) 48 | 49 | :param ResultGroup rg: a result-group handle for multiple tasks. 50 | :return: return values for all tasks in the result group. 51 | 52 | AsyncIO helper for awaiting the result of multiple task executions. 53 | 54 | Example: 55 | 56 | .. code-block:: python 57 | 58 | @huey.task() 59 | def sleep(n): 60 | time.sleep(n) 61 | return n 62 | 63 | async def main(): 64 | # Spawn 3 "sleep" tasks, each sleeping for 2 seconds. 65 | rg = sleep.map([2, 2, 2]) 66 | 67 | # Await the results. This will finish in ~2 seconds while also 68 | # allowing other coroutines to run. 69 | results = await aget_result_group(rg) 70 | -------------------------------------------------------------------------------- /docs/changes.rst: -------------------------------------------------------------------------------- 1 | .. _changes: 2 | 3 | Changes in 2.0 4 | ============== 5 | 6 | The 2.0 release of Huey is mostly API-compatible with previous versions, but 7 | there are a number of things that have been altered or improved in this 8 | release. 9 | 10 | .. warning:: 11 | The serialization format for tasks has changed. An attempt has been made to 12 | provide backward compatibility when reading messages enqueued by an older 13 | version of Huey, but this is not guaranteed to work. 14 | 15 | Summary 16 | ------- 17 | 18 | The ``always_eager`` mode has been renamed :ref:`immediate`. As the new name 19 | implies, tasks are run immediately instead of being enqueued. Immediate mode is 20 | designed to be used during testing and development. When immediate mode is 21 | enabled, Huey switches to using in-memory storage by default, so as to avoid 22 | accidental writes to a live storage. Immediate mode improves greatly on 23 | ``always_eager`` mode, as it no longer requires special-casing and follows the 24 | same code-paths used when Huey is in live mode. See :ref:`immediate` for more 25 | details. 26 | 27 | Previously, the Huey consumer accepted options to run in UTC or local-time. 28 | Various APIs, particularly around scheduling and task revocation, needed to be 29 | compatible with however the consumer was configured, and it could easily get 30 | confusing. As of 2.0, UTC-vs-localtime is specified when instantiating Huey, 31 | and all conversion happens internally, hopefully making things easier to think 32 | about -- that is, you don't have to think about it. 33 | 34 | The events APIs have been removed and replaced by a :ref:`signals` system. 
35 | Signal handlers are executed synchronously by the worker(s) as they run, so 36 | it's a bit different, but hopefully a lot easier to actually utilize, as the 37 | events API required a dedicated listener thread if you were to make any use of 38 | it (since it used a pub/sub approach). Events could be built on-top of the 39 | signals, but currently I have no plans for this. 40 | 41 | Errors are no longer stored in a separate list. Should a task fail due to an 42 | unhandled exception, the exception will be placed in the result store, and can 43 | be introspected using the task's :py:class:`Result` handle. 44 | 45 | Huey now supports :ref:`priority`. To use priorities with Redis, you need to be 46 | running Redis 5.0 or newer, and should use :py:class:`PriorityRedisHuey`. The 47 | original :py:class:`RedisHuey` continues to support older versions of Redis. 48 | :py:class:`SqliteHuey` and the in-memory storage used for dev/testing provide 49 | full support for task priorities. 50 | 51 | Details 52 | ------- 53 | 54 | Changes when initializing :py:class:`Huey`: 55 | 56 | * ``result_store`` parameter has been renamed to ``results``. 57 | * ``events`` parameter is removed. Events have been replaced by :ref:`signals`. 58 | * ``store_errors`` parameter is removed. Huey no longer maintains a separate 59 | list of recent errors. Unhandled errors that occur when running a task are 60 | stored in the result store. Also the ``max_errors`` parameter of the Redis 61 | storage engine is removed. 62 | * ``global_registry`` parameter is removed. Tasks are no longer registered to a 63 | global registry - tasks are registered to the Huey instance with which they 64 | are decorated. 65 | * ``always_eager`` has been renamed ``immediate``. 66 | 67 | New initialization arguments: 68 | 69 | * Boolean ``utc`` parameter (defaults to true). This setting is used to control 70 | how Huey interprets datetimes internally. Previously, this logic was spread 71 | across a number of APIs and a consumer flag. 72 | * Serializer parameter accepts an (optional) object implementing the 73 | :py:class:`Serializer` interface. Defaults to using ``pickle``. 74 | * Accepts option to use gzip ``compression`` when serializing data. 75 | 76 | Other changes to :py:class:`Huey`: 77 | 78 | * Immediate mode can be enabled or disabled at runtime by setting the 79 | :py:attr:`~Huey.immediate` property. 80 | * Event emitter has been replaced by :ref:`signals`, so all event-related APIs 81 | have been removed. 82 | * Special classes of exceptions for the various storage operations have been 83 | removed. For more information see :ref:`exceptions`. 84 | * The ``Huey.errors()`` method is gone. Errors are no longer tracked 85 | separately. 86 | 87 | Changes to the :py:meth:`~Huey.task` and :py:meth:`~Huey.periodic_task` 88 | decorators: 89 | 90 | * Previously these decorators accepted two optional keyword arguments, 91 | ``retries_as_argument`` and ``include_task``. Since the remaining retries are 92 | stored as an attribute on the task itself, the first is redundant. In 2.0 93 | these are replaced by a new keyword argument ``context``, which, if ``True``, 94 | will pass the task instance to the decorated function as a keyword argument. 95 | * Enqueueing a task pipeline will now return a :py:class:`ResultGroup` instead 96 | of a list of individual :py:class:`Result` instances. 97 | 98 | Changes to the :py:class:`Result` handle (previous called 99 | ``TaskResultWrapper``): 100 | 101 | * The ``task_id`` property is renamed to ``id``. 
102 | * Task instances that are revoked via :py:meth:`Result.revoke` will default to 103 | using ``revoke_once=True``. 104 | * The :py:meth:`~Result.reschedule` method no longer requires a delay or eta. 105 | Leaving both empty will reschedule the task immediately. 106 | 107 | Changes to :py:func:`crontab`: 108 | 109 | * The order of arguments has been changed to match the order used on linux 110 | crontab. The order is now minute, hour, day, month, day of week. 111 | 112 | Miscellaneous: 113 | 114 | * Huey no longer uses a global registry for task functions. Task functions are 115 | only visible to the huey instance they are decorated by. 116 | * ``RedisHuey`` defaults to using a blocking pop on the queue, which should 117 | improve latency and reduce chatter. To go back to the old polling default, 118 | specify ``blocking=False`` when creating your huey instance. 119 | * ``SqliteHuey`` no longer has any third-party dependencies and has been moved 120 | into the main ``huey`` module. 121 | * The :py:class:`MiniHuey` contrib module has been renamed to 122 | ``huey.contrib.mini``. 123 | * The ``SimpleStorage`` contrib module has been removed. 124 | 125 | Django-specific: 126 | 127 | * The ``backend_class`` setting has been renamed to ``huey_class`` (used to 128 | specify import-path to Huey implementation, e.g. ``huey.RedisHuey``). 129 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # huey documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Nov 16 12:48:28 2011. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import sys, os 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | #sys.path.insert(0, os.path.abspath('.')) 20 | 21 | # -- General configuration ----------------------------------------------------- 22 | 23 | # If your documentation needs a minimal Sphinx version, state it here. 24 | #needs_sphinx = '1.0' 25 | 26 | # Add any Sphinx extension module names here, as strings. They can be extensions 27 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 28 | extensions = [] 29 | 30 | # Add any paths that contain templates here, relative to this directory. 31 | templates_path = ['_templates'] 32 | 33 | # The suffix of source filenames. 34 | source_suffix = '.rst' 35 | 36 | # The encoding of source files. 37 | #source_encoding = 'utf-8-sig' 38 | 39 | # The master toctree document. 40 | master_doc = 'index' 41 | 42 | # General information about the project. 43 | project = u'huey' 44 | copyright = u'2013, charles leifer' 45 | 46 | # The version info for the project you're documenting, acts as replacement for 47 | # |version| and |release|, also used in various other places throughout the 48 | # built documents. 49 | # 50 | # The short X.Y version. 
51 | src_dir = os.path.realpath(os.path.dirname(os.path.dirname(__file__))) 52 | sys.path.insert(0, src_dir) 53 | from huey import __version__ 54 | version = __version__ 55 | # The full version, including alpha/beta/rc tags. 56 | release = __version__ 57 | 58 | # The language for content autogenerated by Sphinx. Refer to documentation 59 | # for a list of supported languages. 60 | #language = None 61 | 62 | # There are two options for replacing |today|: either, you set today to some 63 | # non-false value, then it is used: 64 | #today = '' 65 | # Else, today_fmt is used as the format for a strftime call. 66 | #today_fmt = '%B %d, %Y' 67 | 68 | # List of patterns, relative to source directory, that match files and 69 | # directories to ignore when looking for source files. 70 | exclude_patterns = ['_build'] 71 | 72 | # The reST default role (used for this markup: `text`) to use for all documents. 73 | #default_role = None 74 | 75 | # If true, '()' will be appended to :func: etc. cross-reference text. 76 | #add_function_parentheses = True 77 | 78 | # If true, the current module name will be prepended to all description 79 | # unit titles (such as .. function::). 80 | #add_module_names = True 81 | 82 | # If true, sectionauthor and moduleauthor directives will be shown in the 83 | # output. They are ignored by default. 84 | #show_authors = False 85 | 86 | # The name of the Pygments (syntax highlighting) style to use. 87 | pygments_style = 'sphinx' 88 | 89 | # A list of ignored prefixes for module index sorting. 90 | #modindex_common_prefix = [] 91 | 92 | 93 | # -- Options for HTML output --------------------------------------------------- 94 | 95 | # The theme to use for HTML and HTML Help pages. See the documentation for 96 | # a list of builtin themes. 97 | #html_theme = 'flask' 98 | html_theme = 'default' 99 | 100 | # Theme options are theme-specific and customize the look and feel of a theme 101 | # further. For a list of options available for each theme, see the 102 | # documentation. 103 | #html_theme_options = { 104 | # 'index_logo': 'logo.jpg', 105 | #} 106 | # 107 | ## Add any paths that contain custom themes here, relative to this directory. 108 | #html_theme_path = ['_themes'] 109 | 110 | # The name for this set of Sphinx documents. If None, it defaults to 111 | # " v documentation". 112 | #html_title = None 113 | 114 | # A shorter title for the navigation bar. Default is the same as html_title. 115 | #html_short_title = None 116 | 117 | # The name of an image file (relative to this directory) to place at the top 118 | # of the sidebar. 119 | #html_logo = None 120 | 121 | # The name of an image file (within the static path) to use as favicon of the 122 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 123 | # pixels large. 124 | #html_favicon = None 125 | 126 | # Add any paths that contain custom static files (such as style sheets) here, 127 | # relative to this directory. They are copied after the builtin static files, 128 | # so a file named "default.css" will overwrite the builtin "default.css". 129 | html_static_path = ['_static'] 130 | 131 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 132 | # using the given strftime format. 133 | #html_last_updated_fmt = '%b %d, %Y' 134 | 135 | # If true, SmartyPants will be used to convert quotes and dashes to 136 | # typographically correct entities. 137 | #html_use_smartypants = True 138 | 139 | # Custom sidebar templates, maps document names to template names. 
140 | #html_sidebars = {} 141 | 142 | # Additional templates that should be rendered to pages, maps page names to 143 | # template names. 144 | #html_additional_pages = {} 145 | 146 | # If false, no module index is generated. 147 | #html_domain_indices = True 148 | 149 | # If false, no index is generated. 150 | #html_use_index = True 151 | 152 | # If true, the index is split into individual pages for each letter. 153 | #html_split_index = False 154 | 155 | # If true, links to the reST sources are added to the pages. 156 | #html_show_sourcelink = True 157 | 158 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 159 | #html_show_sphinx = True 160 | 161 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 162 | #html_show_copyright = True 163 | 164 | # If true, an OpenSearch description file will be output, and all pages will 165 | # contain a tag referring to it. The value of this option must be the 166 | # base URL from which the finished HTML is served. 167 | #html_use_opensearch = '' 168 | 169 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 170 | #html_file_suffix = None 171 | 172 | # Output file base name for HTML help builder. 173 | htmlhelp_basename = 'hueydoc' 174 | 175 | 176 | # -- Options for LaTeX output -------------------------------------------------- 177 | 178 | latex_elements = { 179 | # The paper size ('letterpaper' or 'a4paper'). 180 | #'papersize': 'letterpaper', 181 | 182 | # The font size ('10pt', '11pt' or '12pt'). 183 | #'pointsize': '10pt', 184 | 185 | # Additional stuff for the LaTeX preamble. 186 | #'preamble': '', 187 | } 188 | 189 | # Grouping the document tree into LaTeX files. List of tuples 190 | # (source start file, target name, title, author, documentclass [howto/manual]). 191 | latex_documents = [ 192 | ('index', 'huey.tex', u'huey Documentation', 193 | u'charles leifer', 'manual'), 194 | ] 195 | 196 | # The name of an image file (relative to this directory) to place at the top of 197 | # the title page. 198 | #latex_logo = None 199 | 200 | # For "manual" documents, if this is true, then toplevel headings are parts, 201 | # not chapters. 202 | #latex_use_parts = False 203 | 204 | # If true, show page references after internal links. 205 | #latex_show_pagerefs = False 206 | 207 | # If true, show URL addresses after external links. 208 | #latex_show_urls = False 209 | 210 | # Documents to append as an appendix to all manuals. 211 | #latex_appendices = [] 212 | 213 | # If false, no module index is generated. 214 | #latex_domain_indices = True 215 | 216 | 217 | # -- Options for manual page output -------------------------------------------- 218 | 219 | # One entry per manual page. List of tuples 220 | # (source start file, name, description, authors, manual section). 221 | man_pages = [ 222 | ('index', 'huey', u'huey Documentation', 223 | [u'charles leifer'], 1) 224 | ] 225 | 226 | # If true, show URL addresses after external links. 227 | #man_show_urls = False 228 | 229 | 230 | # -- Options for Texinfo output ------------------------------------------------ 231 | 232 | # Grouping the document tree into Texinfo files. List of tuples 233 | # (source start file, target name, title, author, 234 | # dir menu entry, description, category) 235 | texinfo_documents = [ 236 | ('index', 'huey', u'huey Documentation', 237 | u'charles leifer', 'huey', 'One line description of project.', 238 | 'Miscellaneous'), 239 | ] 240 | 241 | # Documents to append as an appendix to all manuals. 
242 | #texinfo_appendices = [] 243 | 244 | # If false, no module index is generated. 245 | #texinfo_domain_indices = True 246 | 247 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 248 | #texinfo_show_urls = 'footnote' 249 | -------------------------------------------------------------------------------- /docs/contrib.rst: -------------------------------------------------------------------------------- 1 | .. _contrib: 2 | 3 | Huey Extensions 4 | =============== 5 | 6 | The ``huey.contrib`` package contains modules that provide extra functionality 7 | beyond the core APIs. 8 | 9 | .. include:: mini.rst 10 | 11 | .. include:: django.rst 12 | 13 | .. include:: asyncio.rst 14 | -------------------------------------------------------------------------------- /docs/huey.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/docs/huey.jpg -------------------------------------------------------------------------------- /docs/imports.rst: -------------------------------------------------------------------------------- 1 | .. _imports: 2 | 3 | Understanding how tasks are imported 4 | ==================================== 5 | 6 | Behind-the-scenes when you decorate a function with :py:meth:`~Huey.task` or 7 | :py:meth:`~Huey.periodic_task`, the function registers itself with an in-memory 8 | registry. When a task function is called, a reference is put into the queue, 9 | along with the arguments the function was called with, etc. The message is then 10 | read by the consumer, and the task function is looked-up in the consumer's 11 | registry. Because of the way this works, it is strongly recommended 12 | that **all decorated functions be imported when the consumer starts up**. 13 | 14 | .. note:: 15 | If a task is not recognized, the consumer will raise a 16 | :py:class:`HueyException`. 17 | 18 | The consumer is executed with a single required parameter -- the import path to 19 | a :py:class:`Huey` object. It will import the Huey instance along with 20 | anything else in the module -- thus you must be sure **imports of your tasks 21 | occur with the import of the Huey object**. 22 | 23 | Suggested organization of code 24 | ------------------------------ 25 | 26 | Generally, I structure things like this, which makes it very easy to avoid 27 | circular imports. 28 | 29 | * ``config.py``, the module containing the :py:class:`Huey` object. 30 | 31 | .. code-block:: python 32 | 33 | # config.py 34 | from huey import RedisHuey 35 | 36 | huey = RedisHuey('testing') 37 | 38 | * ``tasks.py``, the module containing any decorated functions. Imports the 39 | ``huey`` object from the ``config.py`` module: 40 | 41 | .. code-block:: python 42 | 43 | # tasks.py 44 | from config import huey 45 | 46 | @huey.task() 47 | def add(a, b): 48 | return a + b 49 | 50 | * ``main.py`` / ``app.py``, the "main" module. Imports both the ``config.py`` 51 | module **and** the ``tasks.py`` module. 52 | 53 | .. code-block:: python 54 | 55 | # main.py 56 | from config import huey # import the "huey" object. 57 | from tasks import add # import any tasks / decorated functions 58 | 59 | 60 | if __name__ == '__main__': 61 | result = add(1, 2) 62 | print('1 + 2 = %s' % result.get(blocking=True)) 63 | 64 | To run the consumer, point it at ``main.huey``, in this way, both the ``huey`` 65 | instance **and** the task functions are imported in a centralized location. 66 | 67 | .. 
code-block:: console 68 | 69 | $ huey_consumer.py main.huey 70 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. huey documentation master file, created by 2 | sphinx-quickstart on Wed Nov 16 12:48:28 2011. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | huey 7 | ==== 8 | 9 | .. image:: http://media.charlesleifer.com/blog/photos/huey2-logo.png 10 | 11 | *a lightweight alternative*. 12 | 13 | huey is: 14 | 15 | * a task queue 16 | * written in python 17 | * clean and simple API 18 | * redis, sqlite, file-system, or in-memory storage 19 | * `example code `_. 20 | 21 | huey supports: 22 | 23 | * multi-process, multi-thread or greenlet task execution models 24 | * schedule tasks to execute at a given time, or after a given delay 25 | * schedule recurring tasks, like a crontab 26 | * automatically retry tasks that fail 27 | * task prioritization 28 | * task result storage 29 | * task expiration 30 | * task locking 31 | * task pipelines and chains 32 | 33 | .. image:: http://i.imgur.com/2EpRs.jpg 34 | 35 | At a glance 36 | ----------- 37 | 38 | :py:meth:`~Huey.task` and :py:meth:`~Huey.periodic_task` decorators turn 39 | functions into tasks executed by the consumer: 40 | 41 | .. code-block:: python 42 | 43 | from huey import RedisHuey, crontab 44 | 45 | huey = RedisHuey('my-app', host='redis.myapp.com') 46 | 47 | @huey.task() 48 | def add_numbers(a, b): 49 | return a + b 50 | 51 | @huey.task(retries=2, retry_delay=60) 52 | def flaky_task(url): 53 | # This task might fail, in which case it will be retried up to 2 times 54 | # with a delay of 60s between retries. 55 | return this_might_fail(url) 56 | 57 | @huey.periodic_task(crontab(minute='0', hour='3')) 58 | def nightly_backup(): 59 | sync_all_data() 60 | 61 | Calling a ``task``-decorated function will enqueue the function call for 62 | execution by the consumer. A special result handle is returned immediately, 63 | which can be used to fetch the result once the task is finished: 64 | 65 | .. code-block:: pycon 66 | 67 | >>> from demo import add_numbers 68 | >>> res = add_numbers(1, 2) 69 | >>> res 70 | 71 | 72 | >>> res() 73 | 3 74 | 75 | Tasks can be scheduled to run in the future: 76 | 77 | .. code-block:: pycon 78 | 79 | >>> res = add_numbers.schedule((2, 3), delay=10) # Will be run in ~10s. 80 | >>> res(blocking=True) # Will block until task finishes, in ~10s. 81 | 5 82 | 83 | For much more, check out the :ref:`guide` or take a look at the `example code `_. 84 | 85 | Running the consumer 86 | ^^^^^^^^^^^^^^^^^^^^ 87 | 88 | Run the consumer with four worker processes: 89 | 90 | .. code-block:: console 91 | 92 | $ huey_consumer.py my_app.huey -k process -w 4 93 | 94 | To run the consumer with a single worker thread (default): 95 | 96 | .. code-block:: console 97 | 98 | $ huey_consumer.py my_app.huey 99 | 100 | If your work-loads are mostly IO-bound, you can run the consumer with threads 101 | or greenlets instead. Because greenlets are so lightweight, you can run quite a 102 | few of them efficiently: 103 | 104 | .. code-block:: console 105 | 106 | $ huey_consumer.py my_app.huey -k greenlet -w 32 107 | 108 | For more information, see the :ref:`consuming-tasks` document. 109 | 110 | Table of contents 111 | ----------------- 112 | 113 | .. 
toctree:: 114 | :maxdepth: 2 115 | 116 | installation 117 | guide 118 | consumer 119 | imports 120 | shared_resources 121 | signals 122 | api 123 | contrib 124 | troubleshooting 125 | changes 126 | 127 | Huey is named in honor of my cat 128 | 129 | .. image:: http://m.charlesleifer.com/t/800x-/blog/photos/p1473037658.76.jpg?key=mD9_qMaKBAuGPi95KzXYqg 130 | 131 | 132 | Indices and tables 133 | ================== 134 | 135 | * :ref:`genindex` 136 | * :ref:`modindex` 137 | * :ref:`search` 138 | 139 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. _installation: 2 | 3 | Installing 4 | ========== 5 | 6 | huey can be installed from PyPI using `pip `_. 7 | 8 | .. code-block:: bash 9 | 10 | $ pip install huey 11 | 12 | huey has no dependencies outside the standard library, but `redis-py `_ 13 | is required to utilize Redis for your task storage: 14 | 15 | .. code-block:: bash 16 | 17 | $ pip install redis 18 | 19 | If your tasks are IO-bound rather than CPU-bound, you might consider using the 20 | ``greenlet`` worker type. To use the greenlet workers, you need to 21 | install ``gevent``: 22 | 23 | .. code-block:: bash 24 | 25 | pip install gevent 26 | 27 | Using git 28 | --------- 29 | 30 | If you want to run the very latest, you can clone the `source 31 | repo `_ and install the library: 32 | 33 | .. code-block:: bash 34 | 35 | $ git clone https://github.com/coleifer/huey.git 36 | $ cd huey 37 | $ python setup.py install 38 | 39 | You can run the tests using the test-runner: 40 | 41 | .. code-block:: bash 42 | 43 | $ python setup.py test 44 | 45 | The source code is available online at https://github.com/coleifer/huey 46 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. linkcheck to check all external links for integrity 37 | echo. 
doctest to run all doctests embedded in the documentation if enabled 38 | goto end 39 | ) 40 | 41 | if "%1" == "clean" ( 42 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 43 | del /q /s %BUILDDIR%\* 44 | goto end 45 | ) 46 | 47 | if "%1" == "html" ( 48 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 49 | if errorlevel 1 exit /b 1 50 | echo. 51 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 52 | goto end 53 | ) 54 | 55 | if "%1" == "dirhtml" ( 56 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 57 | if errorlevel 1 exit /b 1 58 | echo. 59 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 60 | goto end 61 | ) 62 | 63 | if "%1" == "singlehtml" ( 64 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 68 | goto end 69 | ) 70 | 71 | if "%1" == "pickle" ( 72 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished; now you can process the pickle files. 76 | goto end 77 | ) 78 | 79 | if "%1" == "json" ( 80 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished; now you can process the JSON files. 84 | goto end 85 | ) 86 | 87 | if "%1" == "htmlhelp" ( 88 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can run HTML Help Workshop with the ^ 92 | .hhp project file in %BUILDDIR%/htmlhelp. 93 | goto end 94 | ) 95 | 96 | if "%1" == "qthelp" ( 97 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 98 | if errorlevel 1 exit /b 1 99 | echo. 100 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 101 | .qhcp project file in %BUILDDIR%/qthelp, like this: 102 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\huey.qhcp 103 | echo.To view the help file: 104 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\huey.ghc 105 | goto end 106 | ) 107 | 108 | if "%1" == "devhelp" ( 109 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 110 | if errorlevel 1 exit /b 1 111 | echo. 112 | echo.Build finished. 113 | goto end 114 | ) 115 | 116 | if "%1" == "epub" ( 117 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 118 | if errorlevel 1 exit /b 1 119 | echo. 120 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 121 | goto end 122 | ) 123 | 124 | if "%1" == "latex" ( 125 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 129 | goto end 130 | ) 131 | 132 | if "%1" == "text" ( 133 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The text files are in %BUILDDIR%/text. 137 | goto end 138 | ) 139 | 140 | if "%1" == "man" ( 141 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 145 | goto end 146 | ) 147 | 148 | if "%1" == "texinfo" ( 149 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 150 | if errorlevel 1 exit /b 1 151 | echo. 152 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 
153 | goto end 154 | ) 155 | 156 | if "%1" == "gettext" ( 157 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 158 | if errorlevel 1 exit /b 1 159 | echo. 160 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 161 | goto end 162 | ) 163 | 164 | if "%1" == "changes" ( 165 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 166 | if errorlevel 1 exit /b 1 167 | echo. 168 | echo.The overview file is in %BUILDDIR%/changes. 169 | goto end 170 | ) 171 | 172 | if "%1" == "linkcheck" ( 173 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 174 | if errorlevel 1 exit /b 1 175 | echo. 176 | echo.Link check complete; look for any errors in the above output ^ 177 | or in %BUILDDIR%/linkcheck/output.txt. 178 | goto end 179 | ) 180 | 181 | if "%1" == "doctest" ( 182 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 183 | if errorlevel 1 exit /b 1 184 | echo. 185 | echo.Testing of doctests in the sources finished, look at the ^ 186 | results in %BUILDDIR%/doctest/output.txt. 187 | goto end 188 | ) 189 | 190 | :end 191 | -------------------------------------------------------------------------------- /docs/mini.rst: -------------------------------------------------------------------------------- 1 | .. _mini: 2 | 3 | Mini-Huey 4 | --------- 5 | 6 | :py:class:`MiniHuey` provides a very lightweight huey-like API that may be 7 | useful for certain applications. The ``MiniHuey`` consumer runs inside a 8 | greenlet in your main application process. This means there is no separate 9 | consumer process to manage, nor is there any persistence for the 10 | enqueued/scheduled tasks; whenever a task is enqueued or is scheduled to run, a 11 | new greenlet is spawned to execute the task. 12 | 13 | *MiniHuey* may be useful if: 14 | 15 | * Your application is a WSGI application. 16 | * Your tasks do stuff like check for spam, send email, make requests to 17 | web-based APIs, query a database server. 18 | * You do not need automatic retries, persistence for your message queue, 19 | dynamic task revocation. 20 | * You wish to keep things nice and simple and don't want the overhead of 21 | additional process(es) to manage. 22 | 23 | *MiniHuey* may be a bad choice if: 24 | 25 | * Your application is incompatible with gevent (e.g. uses asyncio). 26 | * Your tasks do stuff like process large files, crunch numbers, parse large XML 27 | or JSON documents, or other CPU or disk-intensive work. 28 | * You need a persistent store for messages and results, so the consumer can be 29 | restarted without losing any unprocessed messages. 30 | 31 | If you are not sure, then you should probably not use *MiniHuey*. Use the 32 | regular :py:class:`Huey` instead. 33 | 34 | Usage and task declaration: 35 | 36 | .. py:class:: MiniHuey([name='huey'[, interval=1[, pool_size=None]]]) 37 | 38 | :param str name: Name given to this huey instance. 39 | :param int interval: How frequently to check for scheduled tasks (seconds). 40 | :param int pool_size: Limit number of concurrent tasks to given size. 41 | 42 | .. py:method:: task([validate_func=None]) 43 | 44 | Task decorator similar to :py:meth:`Huey.task` or :py:meth:`Huey.periodic_task`. 45 | For tasks that should be scheduled automatically at regular intervals, 46 | simply provide a suitable :py:func:`crontab` definition. 47 | 48 | The decorated task will gain a ``schedule()`` method which can be used 49 | like the :py:meth:`TaskWrapper.schedule` method. 50 | 51 | Examples task declarations: 52 | 53 | .. 
code-block:: python 54 | 55 | from huey import crontab 56 | from huey.contrib.mini import MiniHuey 57 | 58 | huey = MiniHuey() 59 | 60 | @huey.task() 61 | def fetch_url(url): 62 | return urlopen(url).read() 63 | 64 | @huey.task(crontab(minute='0', hour='4')) 65 | def run_backup(): 66 | pass 67 | 68 | Example usage. Running tasks and getting results work about the same as 69 | regular Huey: 70 | 71 | .. code-block:: python 72 | 73 | # Executes the task asynchronously in a new greenlet. 74 | result = fetch_url('https://google.com/') 75 | 76 | # Wait for the task to finish. 77 | html = result.get() 78 | 79 | Scheduling a task for execution: 80 | 81 | .. code-block:: python 82 | 83 | # Fetch in ~30s. 84 | result = fetch_url.schedule(('https://google.com',), delay=30) 85 | 86 | # Wait until result is ready, takes ~30s. 87 | html = result.get() 88 | 89 | .. py:method:: start() 90 | 91 | Start the scheduler in a new green thread. The scheduler is needed if 92 | you plan to schedule tasks for execution using the ``schedule()`` 93 | method, or if you want to run periodic tasks. 94 | 95 | Typically this method should be called when your application starts up. 96 | For example, a WSGI application might do something like: 97 | 98 | .. code-block:: python 99 | 100 | # Always apply gevent monkey-patch before anything else! 101 | from gevent import monkey; monkey.patch_all() 102 | 103 | from my_app import app # flask/bottle/whatever WSGI app. 104 | from my_app import mini_huey 105 | 106 | # Start the scheduler. Returns immediately. 107 | mini_huey.start() 108 | 109 | # Run the WSGI server. 110 | from gevent.pywsgi import WSGIServer 111 | WSGIServer(('127.0.0.1', 8000), app).serve_forever() 112 | 113 | .. py:method:: stop() 114 | 115 | Stop the scheduler. 116 | 117 | .. note:: 118 | There is not a separate decorator for *periodic*, or *crontab*, tasks. Just 119 | use :py:meth:`MiniHuey.task` and pass in a validation function. A 120 | validation function can be generated using the :py:func:`crontab` function. 121 | 122 | .. note:: 123 | Tasks enqueued for immediate execution will be run regardless of whether 124 | the scheduler is running. You only need to start the scheduler if you plan 125 | to schedule tasks in the future or run periodic tasks. 126 | -------------------------------------------------------------------------------- /docs/shared_resources.rst: -------------------------------------------------------------------------------- 1 | .. _shared_resources: 2 | 3 | Managing shared resources 4 | ========================= 5 | 6 | Tasks may need to make use of shared resources from the application, such as a 7 | database connection or an API client. 8 | 9 | The simplest approach is to manage the resource explicitly. For example, Peewee 10 | database connections can be used as a context manager, so if we need to run 11 | some queries inside a task, we might write: 12 | 13 | .. code-block:: python 14 | 15 | database = peewee.PostgresqlDatabase('my_app') 16 | huey = RedisHuey() 17 | 18 | @huey.task() 19 | def check_comment_spam(comment_id): 20 | # Open DB connection at start of task, close upon exit. 21 | with database: 22 | comment = Comment.get(Comment.id == comment_id) 23 | 24 | if akismet.is_spam(comment.body): 25 | comment.is_spam = True 26 | comment.save() 27 | 28 | Another option would be to write a decorator that acquires the shared resource 29 | before calling the task function, and then closes it after the task has 30 | finished. 
To make this a little simpler, Huey provides a special helper 31 | :py:meth:`Huey.context_task` decorator that accepts an object implementing the 32 | context-manager API, and automatically wraps the task within the given context: 33 | 34 | .. code-block:: python 35 | 36 | # Same as previous example, except we can omit the "with db" block. 37 | @huey.context_task(database) 38 | def check_comment_spam(comment_id): 39 | comment = Comment.get(Comment.id == comment_id) 40 | 41 | if akismet.is_spam(comment.body): 42 | comment.is_spam = True 43 | comment.save() 44 | 45 | Startup hooks 46 | ------------- 47 | 48 | The :py:meth:`Huey.on_startup` decorator is used to register a callback that is 49 | executed once when each worker starts running. This hook provides a convenient 50 | way to initialize shared resources or perform other initializations which 51 | should happen within the context of the worker thread or process. 52 | 53 | As an example, suppose many of our tasks will be executing queries against a 54 | Postgres database. Rather than opening and closing a connection for every task, 55 | we will instead open a connection when each worker starts. This connection may 56 | then be used by any tasks that are executed by that consumer: 57 | 58 | .. code-block:: python 59 | 60 | import peewee 61 | 62 | db = PostgresqlDatabase('my_app') 63 | 64 | @huey.on_startup() 65 | def open_db_connection(): 66 | # If for some reason the db connection appears to already be open, 67 | # close it first. 68 | if not db.is_closed(): 69 | db.close() 70 | db.connect() 71 | 72 | @huey.task() 73 | def run_query(n): 74 | db.execute_sql('select pg_sleep(%s)', (n,)) 75 | return n 76 | 77 | .. note:: 78 | The above code works correctly because `peewee `_ 79 | stores connection state in a threadlocal. This is important if we are 80 | running the workers in threads (huey's default). Every thread will be 81 | sharing the same ``PostgresqlDatabase`` instance, but since the connection 82 | state is thread-local, each worker thread will see only its own connection. 83 | 84 | Pre and post execute hooks 85 | -------------------------- 86 | 87 | In addition to the :py:meth:`~Huey.on_startup` hook, Huey also provides 88 | decorators for registering pre- and post-execute hooks: 89 | 90 | * :py:meth:`Huey.pre_execute` - called right before a task is executed. The 91 | handler function should accept one argument: the task that will be executed. 92 | Pre-execute hooks have an additional feature: they can raise a special 93 | :py:class:`CancelExecution` exception to instruct the consumer that the task 94 | should not be run. 95 | * :py:meth:`Huey.post_execute` - called after task has finished. The handler 96 | function should accept three arguments: the task that was executed, the 97 | return value, and the exception (if one occurred, otherwise is ``None``). 98 | 99 | Example: 100 | 101 | .. code-block:: python 102 | 103 | from huey import CancelExecution 104 | 105 | @huey.pre_execute() 106 | def pre_execute_hook(task): 107 | # Pre-execute hooks are passed the task that is about to be run. 108 | 109 | # This pre-execute task will cancel the execution of every task if the 110 | # current day is Sunday. 111 | if datetime.datetime.now().weekday() == 6: 112 | raise CancelExecution('No tasks on sunday!') 113 | 114 | @huey.post_execute() 115 | def post_execute_hook(task, task_value, exc): 116 | # Post-execute hooks are passed the task, the return value (if the task 117 | # succeeded), and the exception (if one occurred). 
118 | if exc is not None: 119 | print('Task "%s" failed with error: %s!' % (task.id, exc)) 120 | 121 | .. note:: 122 | Printing the error message is redundant, as the huey logger already logs 123 | any unhandled exceptions raised by a task, along with a traceback. These 124 | are just examples. 125 | -------------------------------------------------------------------------------- /docs/signals.rst: -------------------------------------------------------------------------------- 1 | .. _signals: 2 | 3 | Signals 4 | ======= 5 | 6 | The consumer will send various signals as it processes tasks. Callbacks can be 7 | registered as signal handlers, and will be called synchronously by the consumer 8 | process. 9 | 10 | The following signals are implemented by Huey: 11 | 12 | * ``SIGNAL_CANCELED``: task was canceled due to a pre-execute hook raising 13 | a :py:class:`CancelExecution` exception. 14 | * ``SIGNAL_COMPLETE``: task has been executed successfully. 15 | * ``SIGNAL_ENQUEUED``: task has been enqueued (**see note**). 16 | * ``SIGNAL_ERROR``: task failed due to an unhandled exception. 17 | * ``SIGNAL_EXECUTING``: task is about to be executed. 18 | * ``SIGNAL_EXPIRED``: task expired. 19 | * ``SIGNAL_LOCKED``: failed to acquire lock, aborting task. 20 | * ``SIGNAL_RETRYING``: task failed, but will be retried. 21 | * ``SIGNAL_REVOKED``: task is revoked and will not be executed. 22 | * ``SIGNAL_SCHEDULED``: task is not yet ready to run and has been added to the 23 | schedule for future execution. 24 | * ``SIGNAL_INTERRUPTED``: task is interrupted when consumer exits. 25 | 26 | When a signal handler is called, it will be called with the following 27 | arguments: 28 | 29 | * ``signal``: the signal name, e.g. ``'executing'``. 30 | * ``task``: the :py:class:`Task` instance. 31 | 32 | The following signals will include additional arguments: 33 | 34 | * ``SIGNAL_ERROR``: includes a third argument ``exc``, which is the 35 | ``Exception`` that was raised while executing the task. 36 | 37 | .. note:: 38 | Signals are run within the context of the consumer **except** that the 39 | ``SIGNAL_ENQUEUED`` signal will also run within the context of your 40 | application code (since your application code will typically enqueue 41 | tasks). Recall that signal handlers are run sequentially and synchronously, 42 | so be careful about introducing overhead in them -- particularly when they 43 | may be run by the application process. 44 | 45 | To register a signal handler, use the :py:meth:`Huey.signal` method: 46 | 47 | .. code-block:: python 48 | 49 | @huey.signal() 50 | def all_signal_handler(signal, task, exc=None): 51 | # This handler will be called for every signal. 52 | print('%s - %s' % (signal, task.id)) 53 | 54 | @huey.signal(SIGNAL_ERROR, SIGNAL_LOCKED, SIGNAL_CANCELED, SIGNAL_REVOKED) 55 | def task_not_executed_handler(signal, task, exc=None): 56 | # This handler will be called for the 4 signals listed, which 57 | # correspond to error conditions. 58 | print('[%s] %s - not executed' % (signal, task.id)) 59 | 60 | @huey.signal(SIGNAL_COMPLETE) 61 | def task_success(signal, task): 62 | # This handle will be called for each task that completes successfully. 63 | pass 64 | 65 | Signal handlers can be unregistered using :py:meth:`Huey.disconnect_signal`. 66 | 67 | .. code-block:: python 68 | 69 | # Disconnect the "task_success" signal handler. 70 | huey.disconnect_signal(task_success) 71 | 72 | # Disconnect the "task_not_executed_handler", but just from 73 | # handling SIGNAL_LOCKED. 
74 | huey.disconnect_signal(task_not_executed_handler, SIGNAL_LOCKED) 75 | 76 | Examples 77 | ^^^^^^^^ 78 | 79 | We'll use the following tasks to illustrate how signals may be sent: 80 | 81 | .. code-block:: python 82 | 83 | @huey.task() 84 | def add(a, b): 85 | return a + b 86 | 87 | @huey.task(retries=2, retry_delay=10) 88 | def flaky_task(): 89 | if random.randint(0, 1) == 0: 90 | raise ValueError('uh-oh') 91 | return 'OK' 92 | 93 | Here is a simple example of a task execution we would expect to succeed: 94 | 95 | .. code-block:: pycon 96 | 97 | >>> result = add(1, 2) 98 | >>> result.get(blocking=True) 99 | 100 | The following signals would be fired: 101 | 102 | * ``SIGNAL_ENQUEUED`` - the task has been enqueued (happens in the application 103 | process). 104 | * ``SIGNAL_EXECUTING`` - the task has been dequeued and will be executed. 105 | * ``SIGNAL_COMPLETE`` - the task has finished successfully. 106 | 107 | Here is an example of scheduling a task for execution after a short delay: 108 | 109 | .. code-block:: pycon 110 | 111 | >>> result = add.schedule((2, 3), delay=10) 112 | >>> result(True) # same as result.get(blocking=True) 113 | 114 | The following signals would be sent: 115 | 116 | * ``SIGNAL_ENQUEUED`` - the task has been enqueued (happens in the **application** 117 | process). 118 | * ``SIGNAL_SCHEDULED`` - the task is not yet ready to run, so it has been added 119 | to the schedule. 120 | * After 10 seconds, the consumer will re-enqueue the task as it is now ready to 121 | run, sending the ``SIGNAL_ENQUEUED`` (in the **consumer** process!). 122 | * Then the consumer will run the task and send the ``SIGNAL_EXECUTING`` signal. 123 | * ``SIGNAL_COMPLETE``. 124 | 125 | Here is an example that may fail, in which case it will be retried 126 | automatically with a delay of 10 seconds. 127 | 128 | .. code-block:: pycon 129 | 130 | >>> result = flaky_task() 131 | >>> try: 132 | ... result.get(blocking=True) 133 | ... except TaskException: 134 | ... result.reset() 135 | ... result.get(blocking=True) # Try again if first time fails. 136 | ... 137 | 138 | Assuming the task failed the first time and succeeded the second time, we would 139 | see the following signals being sent: 140 | 141 | * ``SIGNAL_ENQUEUED`` - task has been enqueued. 142 | * ``SIGNAL_EXECUTING`` - the task is being executed. 143 | * ``SIGNAL_ERROR`` - the task raised an unhandled exception. 144 | * ``SIGNAL_RETRYING`` - the task will be retried. 145 | * ``SIGNAL_SCHEDULED`` - the task has been added to the schedule for execution 146 | in ~10 seconds. 147 | * ``SIGNAL_ENQUEUED`` - 10s have elapsed and the task is ready to run and has 148 | been re-enqueued. 149 | * ``SIGNAL_EXECUTING`` - second try running task. 150 | * ``SIGNAL_COMPLETE`` - task succeeded. 151 | 152 | What happens if we revoke the ``add()`` task and then attempt to execute it: 153 | 154 | .. code-block:: pycon 155 | 156 | >>> add.revoke() 157 | >>> res = add(1, 2) 158 | 159 | The following signal will be sent: 160 | 161 | * ``SIGNAL_ENQUEUED`` - the task has been enqueued for execution. 162 | * ``SIGNAL_REVOKED`` - this is sent before the task enters the "executing" 163 | state. When a task is revoked, no other signals will be sent. 164 | 165 | Using SIGNAL_INTERRUPTED 166 | ^^^^^^^^^^^^^^^^^^^^^^^^ 167 | 168 | The correct way to shut-down the Huey consumer is to send a ``SIGINT`` signal 169 | to the worker process (e.g. Ctrl+C) - this initiates a graceful shutdown. 
170 | Sometimes, however, you may need to shutdown the consumer using ``SIGTERM`` - 171 | this immediately stops the consumer. Any tasks that are currently being 172 | executed are then "lost" and will not be retried by default (see also: 173 | :ref:`consumer-shutdown`). 174 | 175 | To avoid losing these tasks, you can use a ``SIGNAL_INTERRUPTED`` handler to 176 | re-enqueue them: 177 | 178 | .. code-block:: python 179 | 180 | @huey.signal(SIGNAL_INTERRUPTED) 181 | def on_interrupted(signal, task, *args, **kwargs): 182 | # The consumer was shutdown before `task` finished executing. 183 | # Re-enqueue it. 184 | huey.enqueue(task) 185 | 186 | Performance considerations 187 | -------------------------- 188 | 189 | Signal handlers are executed **synchronously** by the consumer as it processes 190 | tasks (with the exception of ``SIGNAL_ENQUEUED``, which also runs in your 191 | application process). It is important to use care when implementing signal 192 | handlers, as one slow signal handler can impact the overall responsiveness of 193 | the consumer. 194 | 195 | For example, if you implement a signal handler that posts some data to REST 196 | API, everything might work fine until the REST API goes down or stops being 197 | responsive -- which will cause the signal handler to block, which then prevents 198 | the consumer from moving on to the next task. 199 | 200 | Another consideration is the :ref:`management of shared resources ` 201 | that may be used by signal handlers, such as database connections or open file 202 | handles. Signal handlers are called by the consumer workers, which (depending 203 | on how you are running the consumer) may be separate processes, threads or 204 | greenlets. As a result, care should be taken to ensure proper initialization 205 | and cleanup of any resources you plan to use in signal handlers. 206 | 207 | Lastly, take care when implementing ``SIGNAL_ENQUEUED`` handlers, as these may 208 | run in your application-code (e.g. whenever your application enqueues a task), 209 | **or** by the consumer process (e.g. when re-enqueueing a task for retry, or 210 | when enqueueing periodic tasks, when moving a task from the schedule to the 211 | queue, etc). 212 | -------------------------------------------------------------------------------- /docs/troubleshooting.rst: -------------------------------------------------------------------------------- 1 | .. _troubleshooting: 2 | 3 | Troubleshooting and Common Pitfalls 4 | =================================== 5 | 6 | This document outlines some of the common pitfalls you may encounter when 7 | getting set up with huey. It is arranged in a problem/solution format. 8 | 9 | Tasks not running 10 | First step is to increase logging verbosity by running the consumer with 11 | ``--verbose``. You can also specify a logfile using the ``--logfile`` 12 | option. 13 | 14 | Check for any exceptions. The most common cause of tasks not running is 15 | that they are not being loaded, in which case you will 16 | see :py:class:`HueyException` "XXX not found in TaskRegistry" errors. 17 | 18 | "HueyException: XXX not found in TaskRegistry" in log file 19 | Exception occurs when a task is called by a task producer, but is not 20 | imported by the consumer. To fix this, ensure that by loading the 21 | :py:class:`Huey` object, you also import any decorated functions as well. 22 | 23 | For more information on how tasks are imported, see the :ref:`import documentation `. 
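    One common layout, sketched below with illustrative module names, mirrors
    the ``examples/simple`` and ``examples/flask_ex`` projects: the entry-point
    module imports both the :py:class:`Huey` instance and the tasks module.

    .. code-block:: python

        # config.py -- defines the Huey instance.
        from huey import RedisHuey

        huey = RedisHuey('my-app')

        # main.py -- the import path given to the consumer, e.g.
        # "huey_consumer.py main.huey". Importing the tasks module ensures
        # that every @huey.task()-decorated function is registered before
        # the consumer starts pulling messages.
        from config import huey
        import tasks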
24 | 25 | "Error importing XXX" when starting consumer 26 | This error message occurs when the module containing the configuration 27 | specified cannot be loaded (not on the pythonpath, mistyped, etc). One 28 | quick way to check is to open up a python shell and try to import the 29 | configuration. 30 | 31 | Example syntax: ``huey_consumer.py main_module.huey`` 32 | 33 | Tasks not returning results 34 | Ensure that you have not accidentally specified ``results=False`` when 35 | instantiating your :py:class:`Huey` object. 36 | 37 | Additionally note that, by default, Huey does not store ``None`` in the 38 | result-store. So if your task returns ``None``, Huey will discard the 39 | result. If you need to block or detect whether a task has finished, it is 40 | recommended that you return a non-``None`` value or in extreme 41 | circumstances you can initialize Huey with ``store_none=True`` (though this 42 | can quickly fill up your result store and is only recommended for users who 43 | are very familiar with Huey). 44 | 45 | Scheduled tasks are not being run at the correct time 46 | Check the time on the server the consumer is running on - if different from 47 | the producer this may cause problems. Huey uses UTC internally by default, 48 | and naive datetimes will be converted from local time to UTC (if local time 49 | happens to not be UTC). 50 | 51 | Cronjobs are not being run 52 | The consumer and scheduler run in UTC by default. 53 | 54 | Greenlet workers seem stuck 55 | If you wish to use the Greenlet worker type, you need to be sure to 56 | monkeypatch in your application's entrypoint. At the top of your ``main`` 57 | module, you can add the following code: ``from gevent import monkey; monkey.patch_all()``. 58 | Furthermore, if your tasks are CPU-bound, ``gevent`` can appear to lock up 59 | because it only supports cooperative multi-tasking (as opposed to 60 | pre-emptive multi-tasking when using threads). For Django, it is necessary 61 | to apply the patch inside the ``manage.py`` script. See the Django docs 62 | section for the code. 63 | 64 | Testing projects using Huey 65 | Use ``immediate=True``: 66 | 67 | .. code-block:: python 68 | 69 | test_mode = os.environ.get('TEST_MODE') 70 | 71 | # When immediate=True, Huey will default to using an in-memory 72 | # storage layer. 73 | huey = RedisHuey(immediate=test_mode) 74 | 75 | # Alternatively, you can set the `immediate` attribute: 76 | huey.immediate = True if test_mode else False 77 | -------------------------------------------------------------------------------- /examples/django_ex/README: -------------------------------------------------------------------------------- 1 | In one terminal, run: 2 | 3 | ./manage.py run_huey 4 | 5 | In another terminal: 6 | 7 | ./manage.py shell 8 | 9 | Commands to try out: 10 | 11 | from djangoex.test_app.tasks import * 12 | res = add(1, 2) 13 | print(res.get(blocking=True)) # Wait for result, then print. 
14 | -------------------------------------------------------------------------------- /examples/django_ex/djangoex/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/examples/django_ex/djangoex/__init__.py -------------------------------------------------------------------------------- /examples/django_ex/djangoex/settings.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | INSTALLED_APPS = [ 4 | 'huey.contrib.djhuey', 5 | 'djangoex.test_app', 6 | ] 7 | 8 | HUEY = { 9 | 'name': 'test-django', 10 | 'consumer': { 11 | 'blocking': True, # Use blocking list pop instead of polling Redis. 12 | 'loglevel': logging.DEBUG, 13 | 'workers': 4, 14 | 'scheduler_interval': 1, 15 | 'simple_log': True, 16 | }, 17 | } 18 | 19 | DATABASES = {'default': { 20 | 'NAME': ':memory:', 21 | 'ENGINE': 'django.db.backends.sqlite3'}} 22 | 23 | SECRET_KEY = 'foo' 24 | -------------------------------------------------------------------------------- /examples/django_ex/djangoex/test_app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/examples/django_ex/djangoex/test_app/__init__.py -------------------------------------------------------------------------------- /examples/django_ex/djangoex/test_app/models.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/examples/django_ex/djangoex/test_app/models.py -------------------------------------------------------------------------------- /examples/django_ex/djangoex/test_app/tasks.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from huey import crontab 4 | from huey.contrib.djhuey import task, periodic_task, db_task, on_commit_task 5 | 6 | 7 | def tprint(s, c=32): 8 | # Helper to print messages from within tasks using color, to make them 9 | # stand out in examples. 10 | print('\x1b[1;%sm%s\x1b[0m' % (c, s)) 11 | 12 | 13 | # Tasks used in examples. 14 | 15 | @task() 16 | def add(a, b): 17 | return a + b 18 | 19 | 20 | @task() 21 | def mul(a, b): 22 | return a * b 23 | 24 | 25 | @db_task() # Opens DB connection for duration of task. 26 | def slow(n): 27 | tprint('going to sleep for %s seconds' % n) 28 | time.sleep(n) 29 | tprint('finished sleeping for %s seconds' % n) 30 | return n 31 | 32 | 33 | @task(retries=1, retry_delay=5, context=True) 34 | def flaky_task(task=None): 35 | if task is not None and task.retries == 0: 36 | tprint('flaky task succeeded on retry.') 37 | return 'succeeded on retry.' 38 | tprint('flaky task is about to raise an exception.', 31) 39 | raise Exception('flaky task failed!') 40 | 41 | 42 | # Periodic tasks. 43 | 44 | @periodic_task(crontab(minute='*/2')) 45 | def every_other_minute(): 46 | tprint('This task runs every 2 minutes.', 35) 47 | 48 | 49 | @periodic_task(crontab(minute='*/5')) 50 | def every_five_mins(): 51 | tprint('This task runs every 5 minutes.', 34) 52 | 53 | 54 | # When this task is called, it will not be enqueued until the active 55 | # transaction commits. If no transaction is active it will enqueue immediately. 
56 | # Example: 57 | # with transaction.atomic(): 58 | # rh = after_commit('hello!') 59 | # time.sleep(5) # Still not enqueued.... 60 | # 61 | # # Now the task is enqueued. 62 | # print(rh.get(True)) # prints "6". 63 | @on_commit_task() 64 | def after_commit(msg): 65 | tprint(msg, 33) 66 | return len(msg) 67 | -------------------------------------------------------------------------------- /examples/django_ex/djangoex/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf.urls import patterns 2 | 3 | urlpatterns = patterns('', 4 | ) 5 | -------------------------------------------------------------------------------- /examples/django_ex/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangoex.settings") 7 | 8 | from django.core.management import execute_from_command_line 9 | 10 | execute_from_command_line(sys.argv) 11 | -------------------------------------------------------------------------------- /examples/flask_ex/README.md: -------------------------------------------------------------------------------- 1 | ## Flask example 2 | 3 | Minimal example of using Huey with Flask. Displays a form that accepts user 4 | input and then enqueues a task with the form value when the form is submitted. 5 | 6 | To try out the example: 7 | 8 | * Run ``./run_webapp.sh`` then browse to http://localhost:5000/ 9 | * In second terminal, ``./run_huey.sh`` to run the consumer. 10 | 11 | **Important**: note that the tasks and views are imported in the `main.py`, 12 | which serves as the application entry-point. This is because any functions 13 | decorated with `@huey.task()` need to be imported to be registered with the 14 | huey instance. Similarly, we need to import the views so that our view function 15 | is registered with the Flask application. 16 | -------------------------------------------------------------------------------- /examples/flask_ex/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/examples/flask_ex/__init__.py -------------------------------------------------------------------------------- /examples/flask_ex/app.py: -------------------------------------------------------------------------------- 1 | from flask import Flask 2 | from huey import RedisHuey 3 | 4 | 5 | DEBUG = True 6 | SECRET_KEY = 'shhh, secret' 7 | 8 | app = Flask(__name__) 9 | app.config.from_object(__name__) 10 | 11 | huey = RedisHuey() 12 | -------------------------------------------------------------------------------- /examples/flask_ex/main.py: -------------------------------------------------------------------------------- 1 | from app import app 2 | from app import huey 3 | import tasks # Import tasks so they are registered with Huey instance. 4 | import views # Import views so they are registered with Flask app. 5 | 6 | 7 | if __name__ == '__main__': 8 | app.run() 9 | -------------------------------------------------------------------------------- /examples/flask_ex/run_huey.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Ensure that the huey package is on the python-path, in the event it hasn't 4 | # been installed using pip. 
5 | export PYTHONPATH="../../:$PYTHONPATH" 6 | 7 | # Run the consumer with 2 worker threads. 8 | python ../../huey/bin/huey_consumer.py main.huey -w2 9 | -------------------------------------------------------------------------------- /examples/flask_ex/run_webapp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | python main.py 4 | -------------------------------------------------------------------------------- /examples/flask_ex/tasks.py: -------------------------------------------------------------------------------- 1 | from huey import crontab 2 | 3 | from app import huey 4 | 5 | 6 | @huey.task() 7 | def example_task(n): 8 | # Example task -- prints the following line to the stdout of the 9 | # consumer process and returns the argument that was passed in (n). 10 | print('-- RUNNING EXAMPLE TASK: CALLED WITH n=%s --' % n) 11 | return n 12 | 13 | 14 | @huey.periodic_task(crontab(minute='*/5')) 15 | def print_every5_minutes(): 16 | # Example periodic task -- this runs every 5 minutes and prints the 17 | # following line to the stdout of the consumer process. 18 | print('-- PERIODIC TASK -- THIS RUNS EVERY 5 MINUTES --') 19 | -------------------------------------------------------------------------------- /examples/flask_ex/templates/home.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Flask example 6 | 7 | 8 |

<h2>Flask example</h2>
 9 |     {% if message %}
10 |       <p>{{ message }}</p>
11 |     {% endif %}
12 |     <p>
13 |     Submitting the form will cause an example task to be enqueued and executed
14 |     by the consumer.
15 |     </p>
16 | 
17 |     <form method="post">
18 |       <input type="text" name="n">
19 |       <input type="submit" value="Submit">
20 |     </form>
21 | 
22 |     <h3>Links</h3>
23 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /examples/flask_ex/views.py: -------------------------------------------------------------------------------- 1 | from flask import render_template 2 | from flask import request 3 | 4 | from app import app 5 | from tasks import example_task 6 | 7 | 8 | @app.route('/', methods=['GET', 'POST']) 9 | def home(): 10 | if request.method == 'POST' and request.form.get('n'): 11 | n = request.form['n'] 12 | 13 | # Enqueue our task, the consumer will pick it up and run it. 14 | example_task(n) 15 | message = 'Enqueued example_task(%s) - see consumer output' % n 16 | else: 17 | message = None 18 | 19 | return render_template('home.html', message=message) 20 | -------------------------------------------------------------------------------- /examples/mini/mini.py: -------------------------------------------------------------------------------- 1 | from gevent import monkey; monkey.patch_all() 2 | import gevent 3 | 4 | from huey.contrib.mini import MiniHuey 5 | 6 | 7 | huey = MiniHuey() 8 | 9 | # If we want to support scheduling tasks for execution in the future, or for 10 | # periodic execution (e.g. cron), then we need to call `huey.start()` which 11 | # starts a scheduler thread. 12 | huey.start() 13 | 14 | 15 | @huey.task() 16 | def add(a, b): 17 | return a + b 18 | 19 | res = add(1, 2) 20 | print(res()) # Result is calculated in separate greenlet. 21 | 22 | print('Scheduling task for execution in 2 seconds.') 23 | res = add.schedule(args=(10, 20), delay=2) 24 | print(res()) 25 | 26 | # Stop the scheduler. Not strictly necessary, but a good idea. 27 | huey.stop() 28 | -------------------------------------------------------------------------------- /examples/simple/README: -------------------------------------------------------------------------------- 1 | In one terminal, run: 2 | 3 | ./cons.sh 4 | 5 | In another terminal: 6 | 7 | python main.py 8 | 9 | To try out the various worker classes, you can run: 10 | 11 | * ./cons.sh thread 12 | * ./cons.sh greenlet 13 | * ./cons.sh process 14 | -------------------------------------------------------------------------------- /examples/simple/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/examples/simple/__init__.py -------------------------------------------------------------------------------- /examples/simple/amain.py: -------------------------------------------------------------------------------- 1 | """ 2 | Example script showing how you can use asyncio to read results. 3 | """ 4 | import asyncio 5 | import time 6 | 7 | from huey.contrib.asyncio import aget_result 8 | from huey.contrib.asyncio import aget_result_group 9 | 10 | from tasks import * 11 | 12 | 13 | async def main(): 14 | s = time.time() 15 | r1, r2, r3 = [slow(2) for _ in range(3)] 16 | results = await asyncio.gather( 17 | aget_result(r1), 18 | aget_result(r2), 19 | aget_result(r3)) 20 | print(results) 21 | print(round(time.time() - s, 2)) 22 | 23 | # Using result group. 
24 | s = time.time() 25 | results = await aget_result_group(slow.map([2, 2, 2])) 26 | print(results) 27 | print(round(time.time() - s, 2)) 28 | 29 | 30 | if __name__ == '__main__': 31 | asyncio.run(main()) 32 | -------------------------------------------------------------------------------- /examples/simple/config.py: -------------------------------------------------------------------------------- 1 | from huey import RedisHuey 2 | 3 | huey = RedisHuey('simple.test', blocking=True) 4 | -------------------------------------------------------------------------------- /examples/simple/cons.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | echo "HUEY CONSUMER" 3 | echo "-------------" 4 | echo "In another terminal, run 'python main.py'" 5 | echo "Stop the consumer using Ctrl+C" 6 | PYTHONPATH=".:$PYTHONPATH" 7 | export PYTHONPATH 8 | WORKER_CLASS=${1:-thread} 9 | export WORKER_CLASS 10 | python ../../huey/bin/huey_consumer.py main.huey --workers=4 -k $WORKER_CLASS -S 11 | -------------------------------------------------------------------------------- /examples/simple/main.py: -------------------------------------------------------------------------------- 1 | import os 2 | if os.environ.get('WORKER_CLASS') in ('greenlet', 'gevent'): 3 | print('Monkey-patching for gevent.') 4 | from gevent import monkey; monkey.patch_all() 5 | import sys 6 | 7 | from config import huey 8 | from tasks import add 9 | 10 | 11 | if __name__ == '__main__': 12 | if sys.version_info[0] == 2: 13 | input = raw_input 14 | 15 | print('Huey Demo -- adds two numbers.') 16 | a = int(input('a = ')) 17 | b = int(input('b = ')) 18 | result = add(a, b) 19 | print('Result:') 20 | print(result.get(True)) 21 | -------------------------------------------------------------------------------- /examples/simple/tasks.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | import time 4 | from huey import crontab 5 | from huey.signals import * 6 | 7 | from config import huey 8 | 9 | 10 | def tprint(s, c=32): 11 | # Helper to print messages from within tasks using color, to make them 12 | # stand out in examples. 13 | print('\x1b[1;%sm%s\x1b[0m' % (c, s)) 14 | 15 | 16 | # Tasks used in examples. 17 | 18 | @huey.task() 19 | def add(a, b): 20 | return a + b 21 | 22 | @huey.task() 23 | def mul(a, b): 24 | return a * b 25 | 26 | 27 | @huey.task() 28 | def slow(n): 29 | tprint('going to sleep for %s seconds' % n) 30 | time.sleep(n) 31 | tprint('finished sleeping for %s seconds' % n) 32 | return n 33 | 34 | 35 | # Example task that will fail on its first invocation, but succeed when 36 | # retried. Also shows how to use the `context` parameter, which passes the task 37 | # instance into the decorated function. 38 | 39 | @huey.task(retries=1, retry_delay=5, context=True) 40 | def flaky_task(task=None): 41 | if task is not None and task.retries == 0: 42 | tprint('flaky task succeeded on retry.') 43 | return 'succeeded on retry.' 44 | tprint('flaky task is about to raise an exception.', 31) 45 | raise Exception('flaky task failed!') 46 | 47 | 48 | # Pipeline example. 49 | 50 | @huey.task() 51 | def add_pipeline(a, b, *nums): 52 | # Example task that spawns a pipeline of sub-tasks. 
53 | # In an interactive shell, you would call this like: 54 | # results = add_pipeline(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) 55 | # print(results.get(blocking=True)) 56 | # [3, 6, 10, 15, 21, 28, 36, 45, 55] 57 | task = add.s(a, b) 58 | for num in nums: 59 | task = task.then(add, num) 60 | result_group = huey.enqueue(task) 61 | tprint('enqueued pipeline of add() tasks.') 62 | return result_group.get(blocking=True) 63 | 64 | 65 | # Periodic tasks. 66 | 67 | @huey.periodic_task(crontab(minute='*/2')) 68 | def every_other_minute(): 69 | tprint('This task runs every 2 minutes.', 35) 70 | 71 | 72 | @huey.periodic_task(crontab(minute='*/5')) 73 | def every_five_mins(): 74 | tprint('This task runs every 5 minutes.', 34) 75 | 76 | 77 | # Example of using hooks. 78 | 79 | @huey.on_startup() 80 | def startup_hook(): 81 | pid = os.getpid() 82 | tid = threading.get_ident() 83 | tprint('process %s, thread %s - startup hook' % (pid, tid)) 84 | 85 | 86 | @huey.on_shutdown() 87 | def shutdown_hook(): 88 | pid = os.getpid() 89 | tid = threading.get_ident() 90 | tprint('process %s, thread %s - shutdown hook' % (pid, tid)) 91 | 92 | 93 | # Example of using a signal. 94 | 95 | @huey.signal(SIGNAL_COMPLETE) 96 | def on_complete(signal, task, exc=None): 97 | tprint('received signal [%s] for task [%s]' % (signal, task)) 98 | 99 | @huey.signal(SIGNAL_INTERRUPTED) 100 | def on_interrupted(signal, task, exc=None): 101 | tprint('received interrupted task signal for task: %s' % task) 102 | 103 | 104 | # Example of retrying a task if it is *currently* running. 105 | 106 | from huey.constants import EmptyData 107 | from huey.exceptions import RetryTask 108 | @huey.task(context=True) 109 | def hold_on(a, task=None): 110 | if task is not None and huey.storage.peek_data('hold_on') is not EmptyData: 111 | print('appears to be running...will retry in 60s') 112 | raise RetryTask(delay=60) 113 | 114 | huey.storage.put_data('hold_on', '1') 115 | try: 116 | print('in task, sleeping for %s' % a) 117 | time.sleep(a) 118 | finally: 119 | huey.storage.pop_data('hold_on') 120 | return True 121 | 122 | # Example of limiting the time a task can run for (10s). 123 | 124 | @huey.task() 125 | def limit_time(n): 126 | s = time.time() 127 | evt = threading.Event() 128 | def run_computation(): 129 | for i in range(n): 130 | # Here we would do some kind of computation, checking our event 131 | # along the way. 132 | print('.', end='', flush=True) 133 | if evt.wait(1): 134 | print('CANCELED') 135 | return 136 | 137 | evt.set() 138 | 139 | t = threading.Thread(target=run_computation) 140 | t.start() 141 | 142 | # Attempt to wait for the thread to finish for a total of 10s. 143 | for i in range(10): 144 | t.join(1) 145 | 146 | if not evt.is_set(): 147 | # The thread still hasn't finished -- flag it that it must stop now. 148 | evt.set() 149 | t.join() 150 | 151 | print('limit_time() completed in %0.2f' % (time.time() - s)) 152 | 153 | # Task that blocks CPU, used for testing. 
154 | 155 | @huey.task() 156 | def slow_cpu(): 157 | for i in range(1000000000): 158 | j = i % 13331 159 | -------------------------------------------------------------------------------- /huey/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Charles Leifer' 2 | __license__ = 'MIT' 3 | __version__ = '2.5.3' 4 | 5 | from huey.api import BlackHoleHuey 6 | from huey.api import Huey 7 | from huey.api import FileHuey 8 | from huey.api import MemoryHuey 9 | from huey.api import PriorityRedisExpireHuey 10 | from huey.api import PriorityRedisHuey 11 | from huey.api import RedisExpireHuey 12 | from huey.api import RedisHuey 13 | from huey.api import SqliteHuey 14 | from huey.api import crontab 15 | from huey.exceptions import CancelExecution 16 | from huey.exceptions import RetryTask 17 | -------------------------------------------------------------------------------- /huey/bin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/huey/bin/__init__.py -------------------------------------------------------------------------------- /huey/bin/huey_consumer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import logging 4 | import os 5 | import sys 6 | 7 | from huey.constants import WORKER_PROCESS 8 | from huey.consumer import Consumer 9 | from huey.consumer_options import ConsumerConfig 10 | from huey.consumer_options import OptionParserHandler 11 | from huey.utils import load_class 12 | 13 | 14 | def err(s): 15 | sys.stderr.write('\033[91m%s\033[0m\n' % s) 16 | 17 | 18 | def load_huey(path): 19 | try: 20 | return load_class(path) 21 | except: 22 | cur_dir = os.getcwd() 23 | if cur_dir not in sys.path: 24 | sys.path.insert(0, cur_dir) 25 | return load_huey(path) 26 | err('Error importing %s' % path) 27 | raise 28 | 29 | 30 | def consumer_main(): 31 | parser_handler = OptionParserHandler() 32 | parser = parser_handler.get_option_parser() 33 | options, args = parser.parse_args() 34 | 35 | if len(args) == 0: 36 | err('Error: missing import path to `Huey` instance') 37 | err('Example: huey_consumer.py app.queue.huey_instance') 38 | sys.exit(1) 39 | 40 | options = {k: v for k, v in options.__dict__.items() 41 | if v is not None} 42 | config = ConsumerConfig(**options) 43 | config.validate() 44 | 45 | if sys.platform == 'win32' and config.worker_type == WORKER_PROCESS: 46 | err('Error: huey cannot be run in "process"-mode on Windows.') 47 | sys.exit(1) 48 | 49 | huey_instance = load_huey(args[0]) 50 | 51 | # Set up logging for the "huey" namespace. 
52 | logger = logging.getLogger('huey') 53 | config.setup_logger(logger) 54 | 55 | consumer = huey_instance.create_consumer(**config.values) 56 | consumer.run() 57 | 58 | 59 | if __name__ == '__main__': 60 | if sys.version_info >= (3, 8) and sys.platform == 'darwin': 61 | import multiprocessing 62 | try: 63 | multiprocessing.set_start_method('fork') 64 | except RuntimeError: 65 | pass 66 | consumer_main() 67 | -------------------------------------------------------------------------------- /huey/constants.py: -------------------------------------------------------------------------------- 1 | WORKER_THREAD = 'thread' 2 | WORKER_GREENLET = 'greenlet' 3 | WORKER_PROCESS = 'process' 4 | WORKER_TYPES = (WORKER_THREAD, WORKER_GREENLET, WORKER_PROCESS) 5 | 6 | 7 | class EmptyData(object): 8 | pass 9 | -------------------------------------------------------------------------------- /huey/consumer_options.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import optparse 3 | from collections import namedtuple 4 | from logging import FileHandler 5 | 6 | from huey.constants import WORKER_THREAD 7 | from huey.constants import WORKER_TYPES 8 | 9 | 10 | config_defaults = ( 11 | ('workers', 1), 12 | ('worker_type', WORKER_THREAD), 13 | ('initial_delay', 0.1), 14 | ('backoff', 1.15), 15 | ('max_delay', 10.0), 16 | ('check_worker_health', True), 17 | ('health_check_interval', 10), 18 | ('scheduler_interval', 1), 19 | ('periodic', True), 20 | ('logfile', None), 21 | ('verbose', None), 22 | ('simple_log', None), 23 | ('flush_locks', False), 24 | ('extra_locks', None), 25 | ) 26 | config_keys = [param for param, _ in config_defaults] 27 | 28 | 29 | def option(name, **options): 30 | if isinstance(name, tuple): 31 | letter, opt_name = name 32 | else: 33 | opt_name = name.replace('_', '-') 34 | letter = name[0] 35 | options.setdefault('dest', name) 36 | return ('-' + letter, '--' + opt_name, options) 37 | 38 | 39 | class OptionParserHandler(object): 40 | def get_worker_options(self): 41 | return ( 42 | # -w, -k, -d, -m, -b, -c, -C, -f 43 | option('workers', type='int', 44 | help='number of worker threads/processes (default=1)'), 45 | option(('k', 'worker-type'), choices=WORKER_TYPES, 46 | dest='worker_type', 47 | help=('worker execution model (thread, greenlet, ' 48 | 'process). Use process for CPU-intensive workloads, ' 49 | 'and greenlet for IO-heavy workloads. 
When in doubt, ' 50 | 'thread is the safest choice.')), 51 | option('delay', dest='initial_delay', 52 | help='minimum time to wait when polling queue (default=.1)', 53 | metavar='SECONDS', type='float'), 54 | option('max_delay', metavar='SECONDS', 55 | help='maximum time to wait when polling queue (default=10)', 56 | type='float'), 57 | option('backoff', metavar='SECONDS', 58 | help=('factor used to back-off polling interval when queue ' 59 | 'is empty (default=1.15, must be >= 1)'), 60 | type='float'), 61 | option(('c', 'health-check-interval'), type='float', 62 | dest='health_check_interval', metavar='SECONDS', 63 | help=('minimum time to wait between worker health checks ' 64 | '(default=1.0)')), 65 | option(('C', 'disable-health-check'), action='store_false', 66 | dest='check_worker_health', 67 | help=('disable health check that monitors worker health, ' 68 | 'restarting any worker that crashes unexpectedly.')), 69 | option('flush_locks', action='store_true', dest='flush_locks', 70 | help=('flush all locks when starting consumer.')), 71 | option(('L', 'extra-locks'), dest='extra_locks', 72 | help=('additional locks to flush, separated by comma.')), 73 | ) 74 | 75 | def get_scheduler_options(self): 76 | return ( 77 | # -s, -n 78 | option('scheduler_interval', type='int', 79 | help='Granularity of scheduler in seconds.'), 80 | option('no_periodic', action='store_false', 81 | dest='periodic', help='do NOT enqueue periodic tasks'), 82 | ) 83 | 84 | def get_logging_options(self): 85 | return ( 86 | # -l, -v, -q, -S 87 | option('logfile', metavar='FILE'), 88 | option('verbose', action='store_true', 89 | help='verbose logging (includes DEBUG statements)'), 90 | option('quiet', action='store_false', dest='verbose', 91 | help='minimal logging'), 92 | option(('S', 'simple'), action='store_true', dest='simple_log', 93 | help='simple logging format (time message)'), 94 | ) 95 | 96 | def get_option_parser(self): 97 | parser = optparse.OptionParser('Usage: %prog [options] ' 98 | 'path.to.huey_instance') 99 | 100 | def add_group(name, description, options): 101 | group = parser.add_option_group(name, description) 102 | for abbrev, name, kwargs in options: 103 | group.add_option(abbrev, name, **kwargs) 104 | 105 | add_group('Logging', 'The following options pertain to logging.', 106 | self.get_logging_options()) 107 | 108 | add_group('Workers', ( 109 | 'By default huey uses a single worker thread. To specify a ' 110 | 'different number of workers, or a different execution model (such' 111 | ' as multiple processes or greenlets), use the options below.'), 112 | self.get_worker_options()) 113 | 114 | add_group('Scheduler', ( 115 | 'By default Huey will run the scheduler once every second to check' 116 | ' for tasks scheduled in the future, or tasks set to run at ' 117 | 'specfic intervals (periodic tasks). 
Use the options below to ' 118 | 'configure the scheduler or to disable periodic task scheduling.'), 119 | self.get_scheduler_options()) 120 | 121 | return parser 122 | 123 | 124 | class ConsumerConfig(namedtuple('_ConsumerConfig', config_keys)): 125 | def __new__(cls, **kwargs): 126 | config = dict(config_defaults) 127 | config.update(kwargs) 128 | args = [config[key] for key in config_keys] 129 | return super(ConsumerConfig, cls).__new__(cls, *args) 130 | 131 | def validate(self): 132 | if self.backoff < 1: 133 | raise ValueError('The backoff must be greater than 1.') 134 | if not (0 < self.scheduler_interval <= 60): 135 | raise ValueError('The scheduler must run at least once per ' 136 | 'minute, and at most once per second (1-60).') 137 | if 60 % self.scheduler_interval != 0: 138 | raise ValueError('The scheduler interval must be a factor of 60: ' 139 | '1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 30, or 60') 140 | 141 | @property 142 | def loglevel(self): 143 | if self.verbose is None: 144 | return logging.INFO 145 | return logging.DEBUG if self.verbose else logging.WARNING 146 | 147 | def setup_logger(self, logger=None): 148 | if self.worker_type == 'process': 149 | worker = '%(process)d' 150 | else: 151 | worker = '%(threadName)s' 152 | 153 | if self.simple_log: 154 | datefmt = '%H:%M:%S' 155 | logformat = '%(asctime)s %(message)s' 156 | else: 157 | datefmt = None # Use default 158 | logformat = ('[%(asctime)s] %(levelname)s:%(name)s:' + worker + 159 | ':%(message)s') 160 | if logger is None: 161 | logger = logging.getLogger() 162 | 163 | if self.logfile: 164 | handler = logging.FileHandler(self.logfile) 165 | else: 166 | handler = logging.StreamHandler() 167 | 168 | handler.setFormatter(logging.Formatter(logformat, datefmt)) 169 | logger.addHandler(handler) 170 | logger.setLevel(self.loglevel) 171 | 172 | @property 173 | def values(self): 174 | return dict((key, getattr(self, key)) for key in config_keys 175 | if key not in ('logfile', 'verbose', 'simple_log')) 176 | -------------------------------------------------------------------------------- /huey/contrib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/huey/contrib/__init__.py -------------------------------------------------------------------------------- /huey/contrib/asyncio.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from huey.constants import EmptyData 4 | 5 | 6 | async def aget_result(res, backoff=1.15, max_delay=1.0, preserve=False): 7 | """ 8 | Await a task result. 9 | 10 | Example usage: 11 | 12 | @huey.task() 13 | def sleep(n): 14 | time.sleep(n) 15 | return n 16 | 17 | # Call the task and get the normal result-handle. 18 | rh = sleep(2) 19 | 20 | # Asynchronously await the result of the task. 21 | result = await aget_result(rh) 22 | 23 | More advanced example of waiting for multiple results concurrently: 24 | 25 | r1 = sleep(1) 26 | r2 = sleep(2) 27 | r3 = sleep(3) 28 | 29 | # Asynchronously await the results of all 3 tasks. Will take 30 | # ~3 seconds. 31 | results = await asyncio.gather( 32 | aget_result(r1), 33 | aget_result(r2), 34 | aget_result(r3)) 35 | 36 | NOTE: the Redis operation will be a normal blocking socket read, but in 37 | practice these will be super fast. The slow part is the necessity to wait 38 | between polling intervals (since the Redis command to read the result does 39 | not block). 
40 | """ 41 | delay = 0.1 42 | while res._result is EmptyData: 43 | delay = min(delay, max_delay) 44 | if res._get(preserve) is EmptyData: 45 | await asyncio.sleep(delay) 46 | delay *= backoff 47 | return res._result 48 | 49 | 50 | async def aget_result_group(rg, *args, **kwargs): 51 | """ 52 | Await the results of a ResultGroup. 53 | 54 | Example usage: 55 | 56 | @huey.task() 57 | def sleep(n): 58 | time.sleep(n) 59 | return n 60 | 61 | rg = sleep.map([2, 2, 2]) 62 | 63 | # This should take ~2 seconds. 64 | results = await aget_result_group(rg) 65 | """ 66 | return await asyncio.gather(*[ 67 | aget_result(r, *args, **kwargs) 68 | for r in rg]) 69 | -------------------------------------------------------------------------------- /huey/contrib/djhuey/__init__.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | from importlib import import_module 3 | import sys 4 | import traceback 5 | 6 | from django.conf import settings 7 | from django.db import close_old_connections 8 | from django.db import transaction 9 | 10 | 11 | configuration_message = """ 12 | Configuring Huey for use with Django 13 | ==================================== 14 | 15 | Huey was designed to be simple to configure in the general case. For that 16 | reason, huey will "just work" with no configuration at all provided you have 17 | Redis installed and running locally. 18 | 19 | On the other hand, you can configure huey manually using the following 20 | setting structure. 21 | 22 | The following example uses Redis on localhost, and will run four worker 23 | processes: 24 | 25 | HUEY = { 26 | 'name': 'my-app', 27 | 'connection': {'host': 'localhost', 'port': 6379}, 28 | 'consumer': { 29 | 'workers': 4, 30 | 'worker_type': 'process', # "thread" or "greenlet" are other options 31 | }, 32 | } 33 | 34 | If you would like to configure Huey's logger using Django's integrated logging 35 | settings, the logger used by consumer is named "huey". 36 | 37 | Alternatively you can simply assign `settings.HUEY` to an actual `Huey` 38 | object instance: 39 | 40 | from huey import RedisHuey 41 | HUEY = RedisHuey('my-app') 42 | """ 43 | 44 | 45 | default_backend_path = 'huey.RedisHuey' 46 | 47 | def default_queue_name(): 48 | try: 49 | return settings.DATABASE_NAME 50 | except AttributeError: 51 | try: 52 | return str(settings.DATABASES['default']['NAME']) 53 | except KeyError: 54 | return 'huey' 55 | 56 | 57 | def get_backend(import_path=default_backend_path): 58 | module_path, class_name = import_path.rsplit('.', 1) 59 | module = import_module(module_path) 60 | return getattr(module, class_name) 61 | 62 | 63 | def config_error(msg): 64 | print(configuration_message) 65 | print('\n\n') 66 | print(msg) 67 | sys.exit(1) 68 | 69 | 70 | HUEY = getattr(settings, 'HUEY', None) 71 | if HUEY is None: 72 | try: 73 | RedisHuey = get_backend(default_backend_path) 74 | except ImportError: 75 | config_error('Error: Huey could not import the redis backend. ' 76 | 'Install `redis-py`.') 77 | else: 78 | HUEY = RedisHuey(default_queue_name()) 79 | 80 | if isinstance(HUEY, dict): 81 | huey_config = HUEY.copy() # Operate on a copy. 82 | name = huey_config.pop('name', default_queue_name()) 83 | if 'backend_class' in huey_config: 84 | huey_config['huey_class'] = huey_config.pop('backend_class') 85 | backend_path = huey_config.pop('huey_class', default_backend_path) 86 | conn_kwargs = huey_config.pop('connection', {}) 87 | try: 88 | del huey_config['consumer'] # Don't need consumer opts here. 
89 | except KeyError: 90 | pass 91 | if 'immediate' not in huey_config: 92 | huey_config['immediate'] = settings.DEBUG 93 | huey_config.update(conn_kwargs) 94 | 95 | try: 96 | backend_cls = get_backend(backend_path) 97 | except (ValueError, ImportError, AttributeError): 98 | config_error('Error: could not import Huey backend:\n%s' 99 | % traceback.format_exc()) 100 | 101 | HUEY = backend_cls(name, **huey_config) 102 | 103 | # Function decorators. 104 | task = HUEY.task 105 | periodic_task = HUEY.periodic_task 106 | lock_task = HUEY.lock_task 107 | 108 | # Task management. 109 | enqueue = HUEY.enqueue 110 | restore = HUEY.restore 111 | restore_all = HUEY.restore_all 112 | restore_by_id = HUEY.restore_by_id 113 | revoke = HUEY.revoke 114 | revoke_all = HUEY.revoke_all 115 | revoke_by_id = HUEY.revoke_by_id 116 | is_revoked = HUEY.is_revoked 117 | result = HUEY.result 118 | scheduled = HUEY.scheduled 119 | 120 | # Hooks. 121 | on_startup = HUEY.on_startup 122 | on_shutdown = HUEY.on_shutdown 123 | pre_execute = HUEY.pre_execute 124 | post_execute = HUEY.post_execute 125 | signal = HUEY.signal 126 | disconnect_signal = HUEY.disconnect_signal 127 | 128 | 129 | def close_db(fn): 130 | """Decorator to be used with tasks that may operate on the database.""" 131 | @wraps(fn) 132 | def inner(*args, **kwargs): 133 | if not HUEY.immediate: 134 | close_old_connections() 135 | try: 136 | return fn(*args, **kwargs) 137 | finally: 138 | if not HUEY.immediate: 139 | close_old_connections() 140 | return inner 141 | 142 | 143 | def db_task(*args, **kwargs): 144 | def decorator(fn): 145 | ret = task(*args, **kwargs)(close_db(fn)) 146 | ret.call_local = fn 147 | return ret 148 | return decorator 149 | 150 | 151 | def db_periodic_task(*args, **kwargs): 152 | def decorator(fn): 153 | ret = periodic_task(*args, **kwargs)(close_db(fn)) 154 | ret.call_local = fn 155 | return ret 156 | return decorator 157 | 158 | 159 | def on_commit_task(*args, **kwargs): 160 | """ 161 | This task will register a post-commit callback to enqueue the task. A 162 | result handle will still be returned immediately, however, even though 163 | the task may not (ever) be enqueued, subject to whether or not the 164 | transaction actually commits. 165 | 166 | Because we have to setup the callback within the bit of code that performs 167 | the actual enqueueing, we cannot expose the full functionality of the 168 | TaskWrapper. If you anticipate wanting all these methods, you are probably 169 | best off decorating the same function twice, e.g.: 170 | 171 | def update_data(pk): 172 | # Do some database operation. 
173 | pass 174 | 175 | my_task = task()(update_data) 176 | my_on_commit_task = on_commit_task()(update_data) 177 | """ 178 | def decorator(fn): 179 | task_wrapper = task(*args, **kwargs)(close_db(fn)) 180 | 181 | @wraps(fn) 182 | def inner(*a, **k): 183 | task = task_wrapper.s(*a, **k) 184 | def enqueue_on_commit(): 185 | task_wrapper.huey.enqueue(task) 186 | transaction.on_commit(enqueue_on_commit) 187 | return HUEY._result_handle(task) 188 | return inner 189 | return decorator 190 | -------------------------------------------------------------------------------- /huey/contrib/djhuey/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/huey/contrib/djhuey/management/__init__.py -------------------------------------------------------------------------------- /huey/contrib/djhuey/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/huey/contrib/djhuey/management/commands/__init__.py -------------------------------------------------------------------------------- /huey/contrib/djhuey/management/commands/run_huey.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | from django.conf import settings 5 | from django.core.management.base import BaseCommand 6 | from django.utils.module_loading import autodiscover_modules 7 | 8 | from huey.consumer_options import ConsumerConfig 9 | from huey.consumer_options import OptionParserHandler 10 | 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | 15 | class Command(BaseCommand): 16 | """ 17 | Queue consumer. Example usage:: 18 | 19 | To start the consumer (note you must export the settings module): 20 | 21 | django-admin.py run_huey 22 | """ 23 | help = "Run the queue consumer" 24 | _type_map = {'int': int, 'float': float} 25 | 26 | def add_arguments(self, parser): 27 | option_handler = OptionParserHandler() 28 | groups = ( 29 | option_handler.get_logging_options(), 30 | option_handler.get_worker_options(), 31 | option_handler.get_scheduler_options(), 32 | ) 33 | for option_list in groups: 34 | for short, full, kwargs in option_list: 35 | if short == '-v': 36 | full = '--huey-verbose' 37 | short = '-V' 38 | if 'type' in kwargs: 39 | kwargs['type'] = self._type_map[kwargs['type']] 40 | kwargs.setdefault('default', None) 41 | parser.add_argument(full, short, **kwargs) 42 | 43 | parser.add_argument('-A', '--disable-autoload', action='store_true', 44 | dest='disable_autoload', 45 | help='Do not autoload "tasks.py"') 46 | 47 | def handle(self, *args, **options): 48 | from huey.contrib.djhuey import HUEY 49 | 50 | # Python 3.8+ on MacOS uses an incompatible multiprocess model. In this 51 | # case we must explicitly configure mp to use fork(). 52 | if sys.version_info >= (3, 8) and sys.platform == 'darwin': 53 | # Apparently this was causing a "context has already been set" 54 | # error for some user. We'll just pass and hope for the best. 55 | # They're apple users so presumably nothing important will be lost. 
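            # (Python 3.8 changed the default start method on macOS from
            # "fork" to "spawn", hence the explicit opt-in to fork below.)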
56 | import multiprocessing 57 | try: 58 | multiprocessing.set_start_method('fork') 59 | except RuntimeError: 60 | pass 61 | 62 | consumer_options = {} 63 | try: 64 | if isinstance(settings.HUEY, dict): 65 | consumer_options.update(settings.HUEY.get('consumer', {})) 66 | except AttributeError: 67 | pass 68 | 69 | for key, value in options.items(): 70 | if value is not None: 71 | consumer_options[key] = value 72 | 73 | consumer_options.setdefault('verbose', 74 | consumer_options.pop('huey_verbose', None)) 75 | 76 | if not options.get('disable_autoload'): 77 | autodiscover_modules("tasks") 78 | 79 | logger = logging.getLogger('huey') 80 | 81 | config = ConsumerConfig(**consumer_options) 82 | config.validate() 83 | 84 | # Only configure the "huey" logger if it has no handlers. For example, 85 | # some users may configure the huey logger via the Django global 86 | # logging config. This prevents duplicating log messages: 87 | if not logger.handlers: 88 | config.setup_logger(logger) 89 | 90 | consumer = HUEY.create_consumer(**config.values) 91 | consumer.run() 92 | -------------------------------------------------------------------------------- /huey/contrib/djhuey/models.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleifer/huey/302fb8ed8c4894ee7b419f91a22ec53b236fe50c/huey/contrib/djhuey/models.py -------------------------------------------------------------------------------- /huey/contrib/helpers.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | import time 3 | import uuid 4 | 5 | from huey import RedisHuey 6 | from huey.exceptions import TaskLockedException 7 | 8 | 9 | class RedisSemaphore(object): 10 | """ 11 | Extremely basic semaphore for use with Redis. 12 | """ 13 | def __init__(self, huey, name, value=1, timeout=None): 14 | if not isinstance(huey, RedisHuey): 15 | raise ValueError('Semaphore is only supported for Redis.') 16 | self.huey = huey 17 | self.key = '%s.lock.%s' % (huey.name, name) 18 | self.value = value 19 | self.timeout = timeout or 86400 # Set a max age for lock holders. 20 | 21 | self.huey._locks.add(self.key) 22 | self._conn = self.huey.storage.conn 23 | 24 | def acquire(self, name=None): 25 | name = name or str(uuid.uuid4()) 26 | ts = time.time() 27 | pipeline = self._conn.pipeline(True) 28 | pipeline.zremrangebyscore(self.key, '-inf', ts - self.timeout) 29 | pipeline.zadd(self.key, {name: ts}) 30 | pipeline.zrank(self.key, name) # See whether we acquired. 31 | if pipeline.execute()[-1] < self.value: 32 | return name 33 | self._conn.zrem(self.key, name) 34 | return 35 | 36 | def release(self, name): 37 | return self._conn.zrem(self.key, name) 38 | 39 | 40 | def lock_task_semaphore(huey, lock_name, value=1, timeout=None): 41 | """ 42 | Lock which can be acquired multiple times (default = 1). 43 | 44 | NOTE: no provisions are made for blocking, waiting, or notifying. This is 45 | just a lock which can be acquired a configurable number of times. 46 | 47 | Example: 48 | 49 | # Allow up to 3 workers to run this task concurrently. If the task is 50 | # locked, retry up to 2 times with a delay of 60s. 51 | @huey.task(retries=2, retry_delay=60) 52 | @lock_task_semaphore(huey, 'my-lock', 3) 53 | def my_task(): 54 | ... 
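    The semaphore can also be driven by hand. A minimal sketch, assuming a
    RedisHuey instance named "huey" (do_work is a placeholder):

        sem = RedisSemaphore(huey, 'manual-lock', value=2)
        tid = sem.acquire()
        if tid is None:
            raise TaskLockedException('unable to acquire manual-lock')
        try:
            do_work()  # Critical section.
        finally:
            sem.release(tid)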
55 | """ 56 | sem = RedisSemaphore(huey, lock_name, value, timeout) 57 | def decorator(fn): 58 | @wraps(fn) 59 | def inner(*args, **kwargs): 60 | tid = sem.acquire() 61 | if tid is None: 62 | raise TaskLockedException('unable to acquire lock %s' % 63 | lock_name) 64 | try: 65 | return fn(*args, **kwargs) 66 | finally: 67 | sem.release(tid) 68 | return inner 69 | return decorator 70 | -------------------------------------------------------------------------------- /huey/contrib/kyototycoon.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | import time 3 | 4 | from ukt import KT_NONE 5 | from ukt import KyotoTycoon 6 | 7 | from huey.api import Huey 8 | from huey.constants import EmptyData 9 | from huey.storage import BaseStorage 10 | from huey.utils import decode 11 | 12 | 13 | class KyotoTycoonStorage(BaseStorage): 14 | priority = True 15 | 16 | def __init__(self, name='huey', host='127.0.0.1', port=1978, db=None, 17 | timeout=None, max_age=3600, queue_db=None, client=None, 18 | blocking=False, result_expire_time=None): 19 | super(KyotoTycoonStorage, self).__init__(name) 20 | if client is None: 21 | client = KyotoTycoon(host, port, timeout, db, serializer=KT_NONE, 22 | max_age=max_age) 23 | 24 | self.blocking = blocking 25 | self.expire_time = result_expire_time 26 | 27 | self.kt = client 28 | self._db = db 29 | self._queue_db = queue_db if queue_db is not None else db 30 | 31 | self.qname = self.name + '.q' 32 | self.sname = self.name + '.s' 33 | 34 | self.q = self.kt.Queue(self.qname, self._queue_db) 35 | self.s = self.kt.Schedule(self.sname, self._queue_db) 36 | 37 | def enqueue(self, data, priority=None): 38 | self.q.add(data, priority) 39 | 40 | def dequeue(self): 41 | if self.blocking: 42 | return self.q.bpop(timeout=30) 43 | else: 44 | return self.q.pop() 45 | 46 | def queue_size(self): 47 | return len(self.q) 48 | 49 | def enqueued_items(self, limit=None): 50 | return self.q.peek(n=limit or -1) 51 | 52 | def flush_queue(self): 53 | return self.q.clear() 54 | 55 | def convert_ts(self, ts): 56 | return int(time.mktime(ts.timetuple())) 57 | 58 | def add_to_schedule(self, data, ts): 59 | self.s.add(data, self.convert_ts(ts)) 60 | 61 | def read_schedule(self, ts): 62 | return self.s.read(self.convert_ts(ts)) 63 | 64 | def schedule_size(self): 65 | return len(self.s) 66 | 67 | def scheduled_items(self, limit=None): 68 | return self.s.items(limit) 69 | 70 | def flush_schedule(self): 71 | return self.s.clear() 72 | 73 | def prefix_key(self, key): 74 | return '%s.%s' % (self.qname, decode(key)) 75 | 76 | def put_data(self, key, value, is_result=False): 77 | xt = self.expire_time if is_result else None 78 | self.kt.set(self.prefix_key(key), value, self._db, expire_time=xt) 79 | 80 | def peek_data(self, key): 81 | result = self.kt.get_bytes(self.prefix_key(key), self._db) 82 | return EmptyData if result is None else result 83 | 84 | def pop_data(self, key): 85 | if self.expire_time is not None: 86 | return self.peek_data(key) 87 | 88 | result = self.kt.seize(self.prefix_key(key), self._db) 89 | return EmptyData if result is None else result 90 | 91 | def delete_data(self, key): 92 | return self.kt.seize(self.prefix_key(key), self._db) is not None 93 | 94 | def has_data_for_key(self, key): 95 | return self.kt.exists(self.prefix_key(key), self._db) 96 | 97 | def put_if_empty(self, key, value): 98 | return self.kt.add(self.prefix_key(key), value, self._db) 99 | 100 | def result_store_size(self): 101 | return 
len(self.kt.match_prefix(self.prefix_key(''), db=self._db)) 102 | 103 | def result_items(self): 104 | prefix = self.prefix_key('') 105 | keys = self.kt.match_prefix(prefix, db=self._db) 106 | result = self.kt.get_bulk(keys, self._db) 107 | 108 | plen = len(prefix) 109 | return {key[plen:]: value for key, value in result.items()} 110 | 111 | def flush_results(self): 112 | prefix = self.prefix_key('') 113 | keys = self.kt.match_prefix(prefix, db=self._db) 114 | return self.kt.remove_bulk(keys, self._db) 115 | 116 | def flush_all(self): 117 | self.flush_queue() 118 | self.flush_schedule() 119 | self.flush_results() 120 | 121 | 122 | class KyotoTycoonHuey(Huey): 123 | storage_class = KyotoTycoonStorage 124 | -------------------------------------------------------------------------------- /huey/contrib/mini.py: -------------------------------------------------------------------------------- 1 | # 2 | # Minimal huey-like API using gevent and running within the parent process. 3 | # 4 | import datetime 5 | import heapq 6 | import logging 7 | import time 8 | from functools import wraps 9 | 10 | import gevent 11 | from gevent.event import AsyncResult 12 | from gevent.event import Event 13 | from gevent.pool import Pool 14 | 15 | from huey.api import crontab 16 | from huey.utils import time_clock 17 | 18 | 19 | logger = logging.getLogger('huey.mini') 20 | 21 | 22 | class MiniHueyResult(AsyncResult): 23 | __call__ = AsyncResult.get 24 | 25 | 26 | class MiniHuey(object): 27 | def __init__(self, name='huey', interval=1, pool_size=None): 28 | self.name = name 29 | self._interval = interval 30 | self._last_check = datetime.datetime.now() 31 | self._periodic_interval = datetime.timedelta(seconds=60) 32 | self._periodic_tasks = [] 33 | self._scheduled_tasks = [] 34 | self._shutdown = Event() 35 | self._pool = Pool(pool_size) 36 | self._run_t = None 37 | 38 | def task(self, validate_func=None): 39 | if validate_func is not None: 40 | def periodic_task_wrapper(fn): 41 | self._periodic_tasks.append((validate_func, fn)) 42 | return fn 43 | return periodic_task_wrapper 44 | 45 | def decorator(fn): 46 | @wraps(fn) 47 | def _inner(*args, **kwargs): 48 | async_result = MiniHueyResult() 49 | self._enqueue(fn, args, kwargs, async_result) 50 | return async_result 51 | 52 | def _schedule(args=None, kwargs=None, delay=None, eta=None): 53 | if delay is not None: 54 | eta = (datetime.datetime.now() + 55 | datetime.timedelta(seconds=delay)) 56 | if eta is None: 57 | raise ValueError('Either a delay (in seconds) or an ' 58 | 'eta (datetime) must be specified.') 59 | async_result = MiniHueyResult() 60 | heapq.heappush(self._scheduled_tasks, 61 | (eta, fn, args, kwargs, async_result)) 62 | return async_result 63 | 64 | _inner.schedule = _schedule 65 | return _inner 66 | 67 | return decorator 68 | 69 | def periodic_task(self, validate_func): 70 | def decorator(fn): 71 | return self.task(validate_func)(fn) 72 | return decorator 73 | 74 | def start(self): 75 | if self._run_t is not None: 76 | raise Exception('Task runner is already running.') 77 | self._run_t = gevent.spawn(self._run) 78 | 79 | def stop(self): 80 | if self._run_t is None: 81 | raise Exception('Task runner does not appear to have started.') 82 | self._shutdown.set() 83 | logger.info('shutdown requested.') 84 | self._run_t.join() 85 | self._run_t = None 86 | 87 | def _enqueue(self, fn, args=None, kwargs=None, async_result=None): 88 | logger.info('enqueueing %s' % fn.__name__) 89 | self._pool.spawn(self._execute, fn, args, kwargs, async_result) 90 | 91 | def 
_execute(self, fn, args, kwargs, async_result): 92 | args = args or () 93 | kwargs = kwargs or {} 94 | start = time_clock() 95 | try: 96 | ret = fn(*args, **kwargs) 97 | except Exception as exc: 98 | logger.exception('task %s failed' % fn.__name__) 99 | async_result.set_exception(exc) 100 | raise 101 | else: 102 | duration = time_clock() - start 103 | 104 | if async_result is not None: 105 | async_result.set(ret) 106 | logger.info('executed %s in %0.3fs', fn.__name__, duration) 107 | 108 | def _run(self): 109 | logger.info('task runner started.') 110 | while not self._shutdown.is_set(): 111 | start = time_clock() 112 | now = datetime.datetime.now() 113 | if self._last_check + self._periodic_interval <= now: 114 | logger.debug('checking periodic task schedule') 115 | self._last_check = now 116 | for validate_func, fn in self._periodic_tasks: 117 | if validate_func(now): 118 | self._enqueue(fn) 119 | 120 | if self._scheduled_tasks: 121 | logger.debug('checking scheduled tasks') 122 | # The 0-th item of a heap is always the smallest. 123 | while self._scheduled_tasks and \ 124 | self._scheduled_tasks[0][0] <= now: 125 | 126 | eta, fn, args, kwargs, async_result = ( 127 | heapq.heappop(self._scheduled_tasks)) 128 | self._enqueue(fn, args, kwargs, async_result) 129 | 130 | # Wait for most of the remained of the time remaining. 131 | remaining = self._interval - (time_clock() - start) 132 | if remaining > 0: 133 | if not self._shutdown.wait(remaining * 0.9): 134 | gevent.sleep(self._interval - (time_clock() - start)) 135 | logger.info('exiting task runner') 136 | -------------------------------------------------------------------------------- /huey/contrib/sql_huey.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | import operator 3 | 4 | from peewee import * 5 | from playhouse.db_url import connect as db_url_connect 6 | 7 | from huey.api import Huey 8 | from huey.constants import EmptyData 9 | from huey.exceptions import ConfigurationError 10 | from huey.storage import BaseStorage 11 | 12 | 13 | class BytesBlobField(BlobField): 14 | def python_value(self, value): 15 | return value if isinstance(value, bytes) else bytes(value) 16 | 17 | 18 | class SqlStorage(BaseStorage): 19 | def __init__(self, name='huey', database=None, **kwargs): 20 | super(SqlStorage, self).__init__(name) 21 | 22 | if database is None: 23 | raise ConfigurationError('Use of SqlStorage requires a ' 24 | 'database= argument, which should be a ' 25 | 'peewee database or a connection string.') 26 | 27 | if isinstance(database, Database): 28 | self.database = database 29 | else: 30 | # Treat database argument as a URL connection string. 31 | self.database = db_url_connect(database) 32 | 33 | self.KV, self.Schedule, self.Task = self.create_models() 34 | self.create_tables() 35 | 36 | # Check for FOR UPDATE SKIP LOCKED support. 37 | if isinstance(self.database, PostgresqlDatabase): 38 | self.for_update = 'FOR UPDATE SKIP LOCKED' 39 | elif isinstance(self.database, MySQLDatabase): 40 | self.for_update = 'FOR UPDATE SKIP LOCKED' # Assume support. 41 | # Try to determine if we're using MariaDB or MySQL. 42 | version, = self.database.execute_sql('select version()').fetchone() 43 | if 'mariadb' in str(version).lower(): 44 | # MariaDB added support in 10.6.0. 45 | if self.database.server_version < (10, 6): 46 | self.for_update = 'FOR UPDATE' 47 | elif self.database.server_version < (8, 0, 1): 48 | # MySQL added support in 8.0.1. 
49 | self.for_update = 'FOR UPDATE' 50 | else: 51 | self.for_update = None 52 | 53 | def create_models(self): 54 | class Base(Model): 55 | class Meta: 56 | database = self.database 57 | 58 | class KV(Base): 59 | queue = CharField() 60 | key = CharField() 61 | value = BytesBlobField() 62 | class Meta: 63 | primary_key = CompositeKey('queue', 'key') 64 | 65 | class Schedule(Base): 66 | queue = CharField() 67 | data = BytesBlobField() 68 | timestamp = TimestampField(resolution=1000) 69 | class Meta: 70 | indexes = ((('queue', 'timestamp'), False),) 71 | 72 | class Task(Base): 73 | queue = CharField() 74 | data = BytesBlobField() 75 | priority = FloatField(default=0.0) 76 | 77 | Task.add_index(Task.priority.desc(), Task.id) 78 | 79 | return (KV, Schedule, Task) 80 | 81 | def create_tables(self): 82 | with self.database: 83 | self.database.create_tables([self.KV, self.Schedule, self.Task]) 84 | 85 | def drop_tables(self): 86 | with self.database: 87 | self.database.drop_tables([self.KV, self.Schedule, self.Task]) 88 | 89 | def close(self): 90 | return self.database.close() 91 | 92 | def tasks(self, *columns): 93 | return self.Task.select(*columns).where(self.Task.queue == self.name) 94 | 95 | def schedule(self, *columns): 96 | return (self.Schedule.select(*columns) 97 | .where(self.Schedule.queue == self.name)) 98 | 99 | def kv(self, *columns): 100 | return self.KV.select(*columns).where(self.KV.queue == self.name) 101 | 102 | def check_conn(self): 103 | if not self.database.is_connection_usable(): 104 | self.database.close() 105 | self.database.connect() 106 | 107 | def enqueue(self, data, priority=None): 108 | self.check_conn() 109 | self.Task.create(queue=self.name, data=data, priority=priority or 0) 110 | 111 | def dequeue(self): 112 | self.check_conn() 113 | query = (self.tasks(self.Task.id, self.Task.data) 114 | .order_by(self.Task.priority.desc(), self.Task.id) 115 | .limit(1)) 116 | if self.for_update: 117 | query = query.for_update(self.for_update) 118 | 119 | with self.database.atomic(): 120 | try: 121 | task = query.get() 122 | except self.Task.DoesNotExist: 123 | return 124 | 125 | nrows = self.Task.delete().where(self.Task.id == task.id).execute() 126 | if nrows == 1: 127 | return task.data 128 | 129 | def queue_size(self): 130 | return self.tasks().count() 131 | 132 | def enqueued_items(self, limit=None): 133 | query = self.tasks(self.Task.data).order_by(self.Task.priority.desc(), 134 | self.Task.id) 135 | if limit is not None: 136 | query = query.limit(limit) 137 | return list(map(operator.itemgetter(0), query.tuples())) 138 | 139 | def flush_queue(self): 140 | self.Task.delete().where(self.Task.queue == self.name).execute() 141 | 142 | def add_to_schedule(self, data, timestamp): 143 | self.check_conn() 144 | self.Schedule.create(queue=self.name, data=data, timestamp=timestamp) 145 | 146 | def read_schedule(self, timestamp): 147 | self.check_conn() 148 | query = (self.schedule(self.Schedule.id, self.Schedule.data) 149 | .where(self.Schedule.timestamp <= timestamp) 150 | .tuples()) 151 | if self.for_update: 152 | query = query.for_update(self.for_update) 153 | 154 | with self.database.atomic(): 155 | results = list(query) 156 | if not results: 157 | return [] 158 | 159 | id_list, data = zip(*results) 160 | (self.Schedule 161 | .delete() 162 | .where(self.Schedule.id.in_(id_list)) 163 | .execute()) 164 | 165 | return list(data) 166 | 167 | def schedule_size(self): 168 | return self.schedule().count() 169 | 170 | def scheduled_items(self): 171 | tasks = 
(self.schedule(self.Schedule.data) 172 | .order_by(self.Schedule.timestamp) 173 | .tuples()) 174 | return list(map(operator.itemgetter(0), tasks)) 175 | 176 | def flush_schedule(self): 177 | (self.Schedule 178 | .delete() 179 | .where(self.Schedule.queue == self.name) 180 | .execute()) 181 | 182 | def put_data(self, key, value, is_result=False): 183 | self.check_conn() 184 | if isinstance(self.database, PostgresqlDatabase): 185 | (self.KV 186 | .insert(queue=self.name, key=key, value=value) 187 | .on_conflict(conflict_target=[self.KV.queue, self.KV.key], 188 | preserve=[self.KV.value]) 189 | .execute()) 190 | else: 191 | self.KV.replace(queue=self.name, key=key, value=value).execute() 192 | 193 | def peek_data(self, key): 194 | self.check_conn() 195 | try: 196 | kv = self.kv(self.KV.value).where(self.KV.key == key).get() 197 | except self.KV.DoesNotExist: 198 | return EmptyData 199 | else: 200 | return kv.value 201 | 202 | def pop_data(self, key): 203 | self.check_conn() 204 | query = self.kv().where(self.KV.key == key) 205 | if self.for_update: 206 | query = query.for_update(self.for_update) 207 | 208 | with self.database.atomic(): 209 | try: 210 | kv = query.get() 211 | except self.KV.DoesNotExist: 212 | return EmptyData 213 | else: 214 | dq = self.KV.delete().where( 215 | (self.KV.queue == self.name) & 216 | (self.KV.key == key)) 217 | return kv.value if dq.execute() == 1 else EmptyData 218 | 219 | def has_data_for_key(self, key): 220 | self.check_conn() 221 | return self.kv().where(self.KV.key == key).exists() 222 | 223 | def put_if_empty(self, key, value): 224 | self.check_conn() 225 | try: 226 | with self.database.atomic(): 227 | self.KV.insert(queue=self.name, key=key, value=value).execute() 228 | except IntegrityError: 229 | return False 230 | else: 231 | return True 232 | 233 | def result_store_size(self): 234 | return self.kv().count() 235 | 236 | def result_items(self): 237 | query = self.kv(self.KV.key, self.KV.value).tuples() 238 | return dict((k, v) for k, v in query.iterator()) 239 | 240 | def flush_results(self): 241 | self.KV.delete().where(self.KV.queue == self.name).execute() 242 | 243 | 244 | SqlHuey = partial(Huey, storage_class=SqlStorage) 245 | -------------------------------------------------------------------------------- /huey/exceptions.py: -------------------------------------------------------------------------------- 1 | class HueyException(Exception): pass 2 | class ConfigurationError(HueyException): pass 3 | class TaskLockedException(HueyException): pass 4 | class ResultTimeout(HueyException): pass 5 | 6 | class CancelExecution(Exception): 7 | def __init__(self, retry=None, *args, **kwargs): 8 | self.retry = retry 9 | super(CancelExecution, self).__init__(*args, **kwargs) 10 | class RetryTask(Exception): 11 | def __init__(self, msg=None, eta=None, delay=None, *args, **kwargs): 12 | self.eta, self.delay = eta, delay 13 | super(RetryTask, self).__init__(msg, *args, **kwargs) 14 | class TaskException(Exception): 15 | def __init__(self, metadata=None, *args): 16 | self.metadata = metadata or {} 17 | super(TaskException, self).__init__(*args) 18 | 19 | def __unicode__(self): 20 | return self.metadata.get('error') or 'unknown error' 21 | __str__ = __unicode__ 22 | -------------------------------------------------------------------------------- /huey/registry.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | 3 | from huey.exceptions import HueyException 4 | 5 | 6 | Message = 
namedtuple('Message', ('id', 'name', 'eta', 'retries', 'retry_delay', 7 | 'priority', 'args', 'kwargs', 'on_complete', 8 | 'on_error', 'expires', 'expires_resolved')) 9 | 10 | # Automatically set missing parameters to None. This is kind-of a hack, but it 11 | # allows us to add new parameters while continuing to be able to handle 12 | # messages enqueued with a smaller-set of arguments. 13 | Message.__new__.__defaults__ = (None,) * len(Message._fields) 14 | 15 | 16 | class Registry(object): 17 | def __init__(self): 18 | self._registry = {} 19 | self._periodic_tasks = [] 20 | 21 | def task_to_string(self, task_class): 22 | return '%s.%s' % (task_class.__module__, task_class.__name__) 23 | 24 | def register(self, task_class): 25 | task_str = self.task_to_string(task_class) 26 | if task_str in self._registry: 27 | raise ValueError('Attempting to register a task with the same ' 28 | 'identifier as existing task. Specify a different' 29 | ' name= to register this task. "%s"' % task_str) 30 | 31 | self._registry[task_str] = task_class 32 | if hasattr(task_class, 'validate_datetime'): 33 | self._periodic_tasks.append(task_class) 34 | return True 35 | 36 | def unregister(self, task_class): 37 | task_str = self.task_to_string(task_class) 38 | if task_str not in self._registry: 39 | return False 40 | 41 | del self._registry[task_str] 42 | if hasattr(task_class, 'validate_datetime'): 43 | self._periodic_tasks = [t for t in self._periodic_tasks 44 | if t is not task_class] 45 | return True 46 | 47 | def string_to_task(self, task_str): 48 | if task_str not in self._registry: 49 | raise HueyException('%s not found in TaskRegistry' % task_str) 50 | return self._registry[task_str] 51 | 52 | def create_message(self, task): 53 | task_str = self.task_to_string(type(task)) 54 | if task_str not in self._registry: 55 | raise HueyException('%s not found in TaskRegistry' % task_str) 56 | 57 | # Remove the "task" instance from any arguments before serializing. 58 | if task.kwargs and 'task' in task.kwargs: 59 | task.kwargs.pop('task') 60 | 61 | on_complete = None 62 | if task.on_complete is not None: 63 | on_complete = self.create_message(task.on_complete) 64 | 65 | on_error = None 66 | if task.on_error is not None: 67 | on_error = self.create_message(task.on_error) 68 | 69 | return Message( 70 | task.id, 71 | task_str, 72 | task.eta, 73 | task.retries, 74 | task.retry_delay, 75 | task.priority, 76 | task.args, 77 | task.kwargs, 78 | on_complete, 79 | on_error, 80 | task.expires, 81 | task.expires_resolved) 82 | 83 | def create_task(self, message): 84 | # Compatibility with Huey 1.11 message format. 
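        # (Old-format messages were plain tuples with (args, kwargs) packed
        # into a single field; unpack them and let the newer fields, such as
        # priority and expires, default to None.)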
85 | if not isinstance(message, Message) and isinstance(message, tuple): 86 | tid, name, eta, retries, retry_delay, (args, kwargs), oc = message 87 | message = Message(tid, name, eta, retries, retry_delay, None, args, 88 | kwargs, oc, None) 89 | 90 | TaskClass = self.string_to_task(message.name) 91 | 92 | on_complete = None 93 | if message.on_complete is not None: 94 | on_complete = self.create_task(message.on_complete) 95 | 96 | on_error = None 97 | if message.on_error is not None: 98 | on_error = self.create_task(message.on_error) 99 | 100 | return TaskClass( 101 | message.args, 102 | message.kwargs, 103 | message.id, 104 | message.eta, 105 | message.retries, 106 | message.retry_delay, 107 | message.priority, 108 | message.expires, 109 | on_complete, 110 | on_error, 111 | message.expires_resolved) 112 | 113 | @property 114 | def periodic_tasks(self): 115 | return [task_class() for task_class in self._periodic_tasks] 116 | -------------------------------------------------------------------------------- /huey/serializer.py: -------------------------------------------------------------------------------- 1 | try: 2 | import gzip 3 | except ImportError: 4 | gzip = None 5 | try: 6 | import zlib 7 | except ImportError: 8 | zlib = None 9 | import hashlib 10 | import hmac 11 | import logging 12 | import pickle 13 | import sys 14 | 15 | from huey.exceptions import ConfigurationError 16 | from huey.utils import encode 17 | 18 | 19 | logger = logging.getLogger('huey.serializer') 20 | 21 | 22 | if gzip is not None: 23 | if sys.version_info[0] > 2: 24 | gzip_compress = gzip.compress 25 | gzip_decompress = gzip.decompress 26 | else: 27 | from io import BytesIO 28 | 29 | def gzip_compress(data, comp_level): 30 | buf = BytesIO() 31 | fh = gzip.GzipFile(fileobj=buf, mode='wb', 32 | compresslevel=comp_level) 33 | fh.write(data) 34 | fh.close() 35 | return buf.getvalue() 36 | 37 | def gzip_decompress(data): 38 | buf = BytesIO(data) 39 | fh = gzip.GzipFile(fileobj=buf, mode='rb') 40 | try: 41 | return fh.read() 42 | finally: 43 | fh.close() 44 | 45 | 46 | if sys.version_info[0] == 2: 47 | def is_compressed(data): 48 | return data and (data[0] == b'\x1f' or data[0] == b'\x78') 49 | else: 50 | def is_compressed(data): 51 | return data and data[0] == 0x1f or data[0] == 0x78 52 | 53 | 54 | class Serializer(object): 55 | def __init__(self, compression=False, compression_level=6, use_zlib=False, 56 | pickle_protocol=pickle.HIGHEST_PROTOCOL): 57 | self.comp = compression 58 | self.comp_level = compression_level 59 | self.use_zlib = use_zlib 60 | self.pickle_protocol = pickle_protocol or pickle.HIGHEST_PROTOCOL 61 | if self.comp: 62 | if self.use_zlib and zlib is None: 63 | raise ConfigurationError('use_zlib specified, but zlib module ' 64 | 'not found.') 65 | elif gzip is None: 66 | raise ConfigurationError('gzip module required to enable ' 67 | 'compression.') 68 | 69 | def _serialize(self, data): 70 | return pickle.dumps(data, self.pickle_protocol) 71 | 72 | def _deserialize(self, data): 73 | return pickle.loads(data) 74 | 75 | def serialize(self, data): 76 | data = self._serialize(data) 77 | if self.comp: 78 | if self.use_zlib: 79 | data = zlib.compress(data, self.comp_level) 80 | else: 81 | data = gzip_compress(data, self.comp_level) 82 | return data 83 | 84 | def deserialize(self, data): 85 | if self.comp: 86 | if not is_compressed(data): 87 | logger.warning('compression enabled but message data does not ' 88 | 'appear to be compressed.') 89 | elif self.use_zlib: 90 | data = zlib.decompress(data) 91 | else: 
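                # Fall back to gzip, the default compression scheme used by
                # serialize() when use_zlib is not set.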
92 | data = gzip_decompress(data) 93 | return self._deserialize(data) 94 | 95 | 96 | def constant_time_compare(s1, s2): 97 | return hmac.compare_digest(s1, s2) 98 | 99 | 100 | class SignedSerializer(Serializer): 101 | def __init__(self, secret=None, salt='huey', **kwargs): 102 | super(SignedSerializer, self).__init__(**kwargs) 103 | if not secret or not salt: 104 | raise ConfigurationError('The secret and salt parameters are ' 105 | 'required by %r' % type(self)) 106 | self.secret = encode(secret) 107 | self.salt = encode(salt) 108 | self.separator = b':' 109 | self._key = hashlib.sha1(self.salt + self.secret).digest() 110 | 111 | def _signature(self, message): 112 | signature = hmac.new(self._key, msg=message, digestmod=hashlib.sha1) 113 | return signature.hexdigest().encode('utf8') 114 | 115 | def _sign(self, message): 116 | return message + self.separator + self._signature(message) 117 | 118 | def _unsign(self, signed): 119 | if self.separator not in signed: 120 | raise ValueError('Separator "%s" not found' % self.separator) 121 | 122 | msg, sig = signed.rsplit(self.separator, 1) 123 | if constant_time_compare(sig, self._signature(msg)): 124 | return msg 125 | 126 | raise ValueError('Signature "%s" mismatch!' % sig) 127 | 128 | def _serialize(self, message): 129 | data = super(SignedSerializer, self)._serialize(message) 130 | return self._sign(data) 131 | 132 | def _deserialize(self, data): 133 | return super(SignedSerializer, self)._deserialize(self._unsign(data)) 134 | -------------------------------------------------------------------------------- /huey/signals.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | 3 | 4 | SIGNAL_CANCELED = 'canceled' 5 | SIGNAL_COMPLETE = 'complete' 6 | SIGNAL_ERROR = 'error' 7 | SIGNAL_EXECUTING = 'executing' 8 | SIGNAL_EXPIRED = 'expired' 9 | SIGNAL_LOCKED = 'locked' 10 | SIGNAL_RETRYING = 'retrying' 11 | SIGNAL_REVOKED = 'revoked' 12 | SIGNAL_SCHEDULED = 'scheduled' 13 | SIGNAL_INTERRUPTED = 'interrupted' 14 | SIGNAL_ENQUEUED = 'enqueued' 15 | 16 | 17 | class Signal(object): 18 | __slots__ = ('receivers',) 19 | 20 | def __init__(self): 21 | self.receivers = {'any': []} 22 | 23 | def connect(self, receiver, *signals): 24 | if not signals: 25 | signals = ('any',) 26 | for signal in signals: 27 | self.receivers.setdefault(signal, []) 28 | self.receivers[signal].append(receiver) 29 | 30 | def disconnect(self, receiver, *signals): 31 | if not signals: 32 | signals = list(self.receivers) 33 | for signal in signals: 34 | try: 35 | self.receivers[signal].remove(receiver) 36 | except ValueError: 37 | pass 38 | 39 | def send(self, signal, task, *args, **kwargs): 40 | receivers = itertools.chain(self.receivers.get(signal, ()), 41 | self.receivers['any']) 42 | for receiver in receivers: 43 | receiver(signal, task, *args, **kwargs) 44 | -------------------------------------------------------------------------------- /huey/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from huey.tests.test_api import * 2 | from huey.tests.test_consumer import * 3 | from huey.tests.test_crontab import * 4 | from huey.tests.test_helpers import * 5 | from huey.tests.test_immediate import * 6 | from huey.tests.test_kt_huey import * 7 | from huey.tests.test_priority import * 8 | from huey.tests.test_registry import * 9 | from huey.tests.test_serializer import * 10 | from huey.tests.test_signals import * 11 | from huey.tests.test_sql_huey import * 12 | from 
huey.tests.test_storage import * 13 | from huey.tests.test_utils import * 14 | from huey.tests.test_wrappers import * 15 | -------------------------------------------------------------------------------- /huey/tests/__main__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import unittest 3 | 4 | from huey.tests import * 5 | 6 | 7 | if __name__ == '__main__': 8 | unittest.main(argv=sys.argv) 9 | -------------------------------------------------------------------------------- /huey/tests/base.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import logging 3 | import os 4 | import unittest 5 | 6 | from huey.api import MemoryHuey 7 | from huey.consumer import Consumer 8 | from huey.exceptions import TaskException 9 | 10 | 11 | class NullHandler(logging.Handler): 12 | def emit(self, record): pass 13 | 14 | 15 | logger = logging.getLogger('huey') 16 | logger.addHandler(NullHandler()) 17 | 18 | TRAVIS = bool(os.environ.get('HUEY_TRAVIS')) 19 | 20 | 21 | class BaseTestCase(unittest.TestCase): 22 | consumer_class = Consumer 23 | 24 | def setUp(self): 25 | super(BaseTestCase, self).setUp() 26 | self.huey = self.get_huey() 27 | 28 | def get_huey(self): 29 | return MemoryHuey(utc=False) 30 | 31 | def execute_next(self, timestamp=None): 32 | task = self.huey.dequeue() 33 | self.assertTrue(task is not None) 34 | return self.huey.execute(task, timestamp=timestamp) 35 | 36 | def trap_exception(self, fn, exc_type=TaskException): 37 | try: 38 | fn() 39 | except exc_type as exc_val: 40 | return exc_val 41 | raise AssertionError('trap_exception() failed to catch %s' % exc_type) 42 | 43 | def consumer(self, **params): 44 | params.setdefault('initial_delay', 0.001) 45 | params.setdefault('max_delay', 0.001) 46 | params.setdefault('workers', 2) 47 | params.setdefault('check_worker_health', False) 48 | return self.consumer_class(self.huey, **params) 49 | 50 | @contextlib.contextmanager 51 | def consumer_context(self, **kwargs): 52 | consumer = self.consumer(**kwargs) 53 | consumer.start() 54 | try: 55 | yield 56 | finally: 57 | consumer.stop(graceful=True) 58 | -------------------------------------------------------------------------------- /huey/tests/test_consumer.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import time 3 | 4 | from huey.api import crontab 5 | from huey.consumer import Consumer 6 | from huey.consumer import Scheduler 7 | from huey.consumer_options import ConsumerConfig 8 | from huey.tests.base import BaseTestCase 9 | from huey.utils import time_clock 10 | 11 | 12 | class TestConsumer(Consumer): 13 | class _Scheduler(Scheduler): 14 | def sleep_for_interval(self, current, interval): 15 | pass 16 | scheduler_class = _Scheduler 17 | 18 | 19 | class TestConsumerIntegration(BaseTestCase): 20 | consumer_class = TestConsumer 21 | 22 | def test_consumer_minimal(self): 23 | @self.huey.task() 24 | def task_a(n): 25 | return n + 1 26 | 27 | with self.consumer_context(): 28 | result = task_a(1) 29 | self.assertEqual(result.get(blocking=True, timeout=2), 2) 30 | 31 | def work_on_tasks(self, consumer, n=1, now=None): 32 | worker, _ = consumer.worker_threads[0] 33 | for i in range(n): 34 | self.assertEqual(len(self.huey), n - i) 35 | worker.loop(now) 36 | 37 | def schedule_tasks(self, consumer, now=None): 38 | scheduler = consumer._create_scheduler() 39 | scheduler._next_loop = time_clock() + 60 40 | scheduler._next_periodic = 
time_clock() - 60 41 | scheduler.loop(now) 42 | 43 | def test_consumer_schedule_task(self): 44 | @self.huey.task() 45 | def task_a(n): 46 | return n + 1 47 | 48 | now = datetime.datetime.now() 49 | eta = now + datetime.timedelta(days=1) 50 | r60 = task_a.schedule((2,), delay=60) 51 | rday = task_a.schedule((3,), eta=eta) 52 | 53 | consumer = self.consumer(workers=1) 54 | self.work_on_tasks(consumer, 2) # Process the two messages. 55 | 56 | self.assertEqual(len(self.huey), 0) 57 | self.assertEqual(self.huey.scheduled_count(), 2) 58 | 59 | self.schedule_tasks(consumer, now) 60 | self.assertEqual(len(self.huey), 0) 61 | self.assertEqual(self.huey.scheduled_count(), 2) 62 | 63 | # Ensure that the task that had a delay of 60s is read from schedule. 64 | later = now + datetime.timedelta(seconds=65) 65 | self.schedule_tasks(consumer, later) 66 | self.assertEqual(len(self.huey), 1) 67 | self.assertEqual(self.huey.scheduled_count(), 1) 68 | 69 | # We can now work on our scheduled task. 70 | self.work_on_tasks(consumer, 1, later) 71 | self.assertEqual(r60.get(), 3) 72 | 73 | # Verify the task was run and that there is only one task remaining to 74 | # be scheduled (in a day). 75 | self.assertEqual(len(self.huey), 0) 76 | self.assertEqual(self.huey.scheduled_count(), 1) 77 | 78 | tomorrow = now + datetime.timedelta(days=1) 79 | self.schedule_tasks(consumer, tomorrow) 80 | self.work_on_tasks(consumer, 1, tomorrow) 81 | self.assertEqual(rday.get(), 4) 82 | self.assertEqual(len(self.huey), 0) 83 | self.assertEqual(self.huey.scheduled_count(), 0) 84 | 85 | def test_consumer_periodic_tasks(self): 86 | state = [] 87 | 88 | @self.huey.periodic_task(crontab(minute='*/10')) 89 | def task_p1(): 90 | state.append('p1') 91 | 92 | @self.huey.periodic_task(crontab(minute='0', hour='0')) 93 | def task_p2(): 94 | state.append('p2') 95 | 96 | consumer = self.consumer(workers=1) 97 | dt = datetime.datetime(2000, 1, 1, 0, 0) 98 | self.schedule_tasks(consumer, dt) 99 | self.assertEqual(len(self.huey), 2) 100 | self.work_on_tasks(consumer, 2) 101 | self.assertEqual(state, ['p1', 'p2']) 102 | 103 | dt = datetime.datetime(2000, 1, 1, 12, 0) 104 | self.schedule_tasks(consumer, dt) 105 | self.assertEqual(len(self.huey), 1) 106 | self.work_on_tasks(consumer, 1) 107 | self.assertEqual(state, ['p1', 'p2', 'p1']) 108 | 109 | task_p1.revoke() 110 | self.schedule_tasks(consumer, dt) 111 | self.assertEqual(len(self.huey), 1) # Enqueued despite being revoked. 112 | self.work_on_tasks(consumer, 1) 113 | self.assertEqual(state, ['p1', 'p2', 'p1']) # No change, not executed. 
114 | 115 | 116 | class TestConsumerConfig(BaseTestCase): 117 | def test_default_config(self): 118 | cfg = ConsumerConfig() 119 | cfg.validate() 120 | consumer = self.huey.create_consumer(**cfg.values) 121 | self.assertEqual(consumer.workers, 1) 122 | self.assertEqual(consumer.worker_type, 'thread') 123 | self.assertTrue(consumer.periodic) 124 | self.assertEqual(consumer.default_delay, 0.1) 125 | self.assertEqual(consumer.scheduler_interval, 1) 126 | self.assertTrue(consumer._health_check) 127 | 128 | def test_consumer_config(self): 129 | cfg = ConsumerConfig(workers=3, worker_type='process', initial_delay=1, 130 | backoff=2, max_delay=4, check_worker_health=False, 131 | scheduler_interval=30, periodic=False) 132 | cfg.validate() 133 | consumer = self.huey.create_consumer(**cfg.values) 134 | 135 | self.assertEqual(consumer.workers, 3) 136 | self.assertEqual(consumer.worker_type, 'process') 137 | self.assertFalse(consumer.periodic) 138 | self.assertEqual(consumer.default_delay, 1) 139 | self.assertEqual(consumer.backoff, 2) 140 | self.assertEqual(consumer.max_delay, 4) 141 | self.assertEqual(consumer.scheduler_interval, 30) 142 | self.assertFalse(consumer._health_check) 143 | 144 | def test_invalid_values(self): 145 | def assertInvalid(**kwargs): 146 | cfg = ConsumerConfig(**kwargs) 147 | self.assertRaises(ValueError, cfg.validate) 148 | 149 | assertInvalid(backoff=0.5) 150 | assertInvalid(scheduler_interval=90) 151 | assertInvalid(scheduler_interval=7) 152 | assertInvalid(scheduler_interval=45) 153 | -------------------------------------------------------------------------------- /huey/tests/test_crontab.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import unittest 3 | 4 | from huey import crontab 5 | 6 | 7 | class TestCrontab(unittest.TestCase): 8 | def test_crontab_month(self): 9 | # validates the following months, 1, 4, 7, 8, 9 10 | valids = [1, 4, 7, 8, 9] 11 | validate_m = crontab(month='1,4,*/6,8-9') 12 | 13 | for x in range(1, 13): 14 | res = validate_m(datetime.datetime(2011, x, 1)) 15 | self.assertEqual(res, x in valids) 16 | 17 | def test_crontab_day(self): 18 | # validates the following days 19 | valids = [1, 4, 7, 8, 9, 13, 19, 25, 31] 20 | validate_d = crontab(day='*/6,1,4,8-9') 21 | 22 | for x in range(1, 32): 23 | res = validate_d(datetime.datetime(2011, 1, x)) 24 | self.assertEqual(res, x in valids) 25 | 26 | valids = [1, 11, 21, 31] 27 | validate_d = crontab(day='*/10') 28 | for x in range(1, 32): 29 | res = validate_d(datetime.datetime(2011, 1, x)) 30 | self.assertEqual(res, x in valids) 31 | 32 | valids.pop() # Remove 31, as feb only has 28 days. 
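        # (2011 was not a leap year, so February stops at 28.)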
33 | for x in range(1, 29): 34 | res = validate_d(datetime.datetime(2011, 2, x)) 35 | self.assertEqual(res, x in valids) 36 | 37 | def test_crontab_hour(self): 38 | # validates the following hours 39 | valids = [0, 1, 4, 6, 8, 9, 12, 18] 40 | validate_h = crontab(hour='8-9,*/6,1,4') 41 | 42 | for x in range(24): 43 | res = validate_h(datetime.datetime(2011, 1, 1, x)) 44 | self.assertEqual(res, x in valids) 45 | 46 | edge = crontab(hour=0) 47 | self.assertTrue(edge(datetime.datetime(2011, 1, 1, 0, 0))) 48 | self.assertFalse(edge(datetime.datetime(2011, 1, 1, 12, 0))) 49 | 50 | def test_crontab_minute(self): 51 | # validates the following minutes 52 | valids = [0, 1, 4, 6, 8, 9, 12, 18, 24, 30, 36, 42, 48, 54] 53 | validate_m = crontab(minute='4,8-9,*/6,1') 54 | 55 | for x in range(60): 56 | res = validate_m(datetime.datetime(2011, 1, 1, 1, x)) 57 | self.assertEqual(res, x in valids) 58 | 59 | # We don't ensure *every* X minutes, but just on the given intervals. 60 | valids = [0, 16, 32, 48] 61 | validate_m = crontab(minute='*/16') 62 | for x in range(60): 63 | res = validate_m(datetime.datetime(2011, 1, 1, 1, x)) 64 | self.assertEqual(res, x in valids) 65 | 66 | def test_crontab_day_of_week(self): 67 | # validates the following days of week 68 | # jan, 1, 2011 is a saturday 69 | valids = [2, 4, 9, 11, 16, 18, 23, 25, 30] 70 | validate_dow = crontab(day_of_week='0,2') 71 | 72 | for x in range(1, 32): 73 | res = validate_dow(datetime.datetime(2011, 1, x)) 74 | self.assertEqual(res, x in valids) 75 | 76 | def test_crontab_sunday(self): 77 | for dow in ('0', '7'): 78 | validate = crontab(day_of_week=dow, hour='0', minute='0') 79 | valid = set((2, 9, 16, 23, 30)) 80 | for x in range(1, 32): 81 | if x in valid: 82 | self.assertTrue(validate(datetime.datetime(2011, 1, x))) 83 | else: 84 | self.assertFalse(validate(datetime.datetime(2011, 1, x))) 85 | 86 | def test_crontab_all_together(self): 87 | # jan 1, 2011 is a saturday 88 | # may 1, 2011 is a sunday 89 | validate = crontab( 90 | month='1,5', 91 | day='1,4,7', 92 | day_of_week='0,6', 93 | hour='*/4', 94 | minute='1-5,10-15,50' 95 | ) 96 | 97 | self.assertTrue(validate(datetime.datetime(2011, 5, 1, 4, 11))) 98 | self.assertTrue(validate(datetime.datetime(2011, 5, 7, 20, 50))) 99 | self.assertTrue(validate(datetime.datetime(2011, 1, 1, 0, 1))) 100 | 101 | # fails validation on month 102 | self.assertFalse(validate(datetime.datetime(2011, 6, 4, 4, 11))) 103 | 104 | # fails validation on day 105 | self.assertFalse(validate(datetime.datetime(2011, 1, 6, 4, 11))) 106 | 107 | # fails validation on day_of_week 108 | self.assertFalse(validate(datetime.datetime(2011, 1, 4, 4, 11))) 109 | 110 | # fails validation on hour 111 | self.assertFalse(validate(datetime.datetime(2011, 1, 1, 1, 11))) 112 | 113 | # fails validation on minute 114 | self.assertFalse(validate(datetime.datetime(2011, 1, 1, 4, 6))) 115 | 116 | def test_invalid_crontabs(self): 117 | # check invalid configurations are detected and reported 118 | self.assertRaises(ValueError, crontab, minute='61') 119 | self.assertRaises(ValueError, crontab, minute='0-61') 120 | self.assertRaises(ValueError, crontab, day_of_week='*/3') 121 | 122 | def test_invalid_crontabs_2(self): 123 | self.assertTrue(crontab(minute='*abc')) 124 | invalid = ('abc', '*abc', 'a-b', '1-c', '0x9') 125 | for i in invalid: 126 | self.assertRaises(ValueError, crontab, minute=i, strict=True) 127 | -------------------------------------------------------------------------------- /huey/tests/test_helpers.py: 
-------------------------------------------------------------------------------- 1 | from huey import RedisHuey 2 | from huey.contrib.helpers import RedisSemaphore 3 | from huey.contrib.helpers import lock_task_semaphore 4 | from huey.exceptions import TaskLockedException 5 | from huey.tests.base import BaseTestCase 6 | 7 | 8 | class TestLockTaskSemaphore(BaseTestCase): 9 | def setUp(self): 10 | super(TestLockTaskSemaphore, self).setUp() 11 | self.semaphore = RedisSemaphore(self.huey, 'lock_a', 2) 12 | self.huey.storage.conn.delete(self.semaphore.key) 13 | 14 | def tearDown(self): 15 | self.huey.storage.conn.delete(self.semaphore.key) 16 | super(TestLockTaskSemaphore, self).tearDown() 17 | 18 | def get_huey(self): 19 | return RedisHuey() 20 | 21 | def test_redis_semaphore(self): 22 | s = self.semaphore 23 | aid1 = s.acquire() 24 | self.assertTrue(aid1 is not None) 25 | aid2 = s.acquire() 26 | self.assertTrue(aid2 is not None) # We can acquire it twice. 27 | self.assertTrue(s.acquire() is None) # Cannot acquire 3 times. 28 | self.assertEqual(s.release(aid2), 1) # Release succeeded. 29 | self.assertEqual(s.release(aid2), 0) # Already released. 30 | self.assertEqual(s.acquire(aid2), aid2) # Re-acquired. 31 | self.assertEqual(s.acquire(aid2), aid2) # No-op (still acquired). 32 | 33 | self.assertEqual(s.release(aid2), 1) # Release succeeded. 34 | self.assertEqual(s.release(aid1), 1) # Release succeeded. 35 | 36 | self.assertTrue(s.acquire() is not None) # Acquire twice. 37 | self.assertTrue(s.acquire() is not None) 38 | self.assertTrue(s.acquire() is None) # Cannot acquire 3 times. 39 | self.huey.storage.conn.delete(s.key) 40 | -------------------------------------------------------------------------------- /huey/tests/test_immediate.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from huey.api import Huey 4 | from huey.api import MemoryHuey 5 | from huey.exceptions import TaskException 6 | from huey.storage import BlackHoleStorage 7 | from huey.tests.base import BaseTestCase 8 | 9 | 10 | class TestImmediate(BaseTestCase): 11 | def get_huey(self): 12 | return MemoryHuey(immediate=True, utc=False) 13 | 14 | def test_immediate(self): 15 | @self.huey.task() 16 | def task_a(n): 17 | return n + 1 18 | 19 | r = task_a(3) 20 | 21 | # Task is not enqueued, but the result *is* stored in the result-store. 22 | self.assertEqual(len(self.huey), 0) 23 | self.assertEqual(self.huey.result_count(), 1) 24 | self.assertEqual(r.get(), 4) 25 | 26 | # After reading, result is removed, as we would expect. 27 | self.assertEqual(self.huey.result_count(), 0) 28 | 29 | # Cannot add 1 to "None", this produces an error. We get the usual 30 | # TaskException, which wraps the TypeError. 31 | r_err = task_a(None) 32 | self.assertRaises(TaskException, r_err.get) 33 | 34 | def test_immediate_pipeline(self): 35 | @self.huey.task() 36 | def add(a, b): 37 | return a + b 38 | 39 | p = add.s(3, 4).then(add, 5).then(add, 6).then(add, 7) 40 | result_group = self.huey.enqueue(p) 41 | self.assertEqual(result_group(), [7, 12, 18, 25]) 42 | 43 | def test_immediate_scheduling(self): 44 | @self.huey.task() 45 | def task_a(n): 46 | return n + 1 47 | 48 | r = task_a.schedule((3,), delay=10) 49 | 50 | # Task is not enqueued, no result is generated, the task is added to 51 | # the schedule, however -- even though the scheduler never runs in 52 | # immediate mode. 
53 | self.assertEqual(len(self.huey), 0) 54 | self.assertEqual(self.huey.result_count(), 0) 55 | self.assertEqual(self.huey.scheduled_count(), 1) 56 | self.assertTrue(r.get() is None) 57 | 58 | def test_immediate_reschedule(self): 59 | state = [] 60 | 61 | @self.huey.task(context=True) 62 | def task_s(task=None): 63 | state.append(task.id) 64 | return 1 65 | 66 | r = task_s.schedule(delay=60) 67 | self.assertEqual(len(self.huey), 0) 68 | self.assertTrue(r() is None) 69 | 70 | r2 = r.reschedule() 71 | self.assertTrue(r.id != r2.id) 72 | self.assertEqual(state, [r2.id]) 73 | self.assertEqual(r2(), 1) 74 | self.assertEqual(len(self.huey), 0) 75 | self.assertTrue(r.is_revoked()) 76 | 77 | # Because the scheduler never picks up the original task (r), its 78 | # revocation key sits in the result store and the task is in the 79 | # schedule still. 80 | self.assertEqual(self.huey.result_count(), 1) 81 | self.assertEqual(self.huey.scheduled_count(), 1) 82 | 83 | def test_immediate_revoke_restore(self): 84 | @self.huey.task() 85 | def task_a(n): 86 | return n + 1 87 | 88 | task_a.revoke() 89 | r = task_a(3) 90 | self.assertEqual(len(self.huey), 0) 91 | self.assertTrue(r.get() is None) 92 | 93 | self.assertTrue(task_a.restore()) 94 | r = task_a(4) 95 | self.assertEqual(r.get(), 5) 96 | 97 | def test_swap_immediate(self): 98 | @self.huey.task() 99 | def task_a(n): 100 | return n + 1 101 | 102 | r = task_a(1) 103 | self.assertEqual(r.get(), 2) 104 | 105 | self.huey.immediate = False 106 | r = task_a(2) 107 | self.assertEqual(len(self.huey), 1) 108 | self.assertEqual(self.huey.result_count(), 0) 109 | task = self.huey.dequeue() 110 | self.assertEqual(self.huey.execute(task), 3) 111 | self.assertEqual(r.get(), 3) 112 | 113 | self.huey.immediate = True 114 | r = task_a(3) 115 | self.assertEqual(r.get(), 4) 116 | self.assertEqual(len(self.huey), 0) 117 | self.assertEqual(self.huey.result_count(), 0) 118 | 119 | def test_map(self): 120 | @self.huey.task() 121 | def task_a(n): 122 | return n + 1 123 | 124 | result_group = task_a.map(range(8)) 125 | self.assertEqual(result_group(), [1, 2, 3, 4, 5, 6, 7, 8]) 126 | 127 | 128 | class NoUseException(Exception): pass 129 | class NoUseStorage(BlackHoleStorage): 130 | def enqueue(self, data, priority=None): raise NoUseException() 131 | def dequeue(self): raise NoUseException() 132 | def add_to_schedule(self, data, ts, utc): raise NoUseException() 133 | def read_schedule(self, ts): raise NoUseException() 134 | def put_data(self, key, value): raise NoUseException() 135 | def peek_data(self, key): raise NoUseException() 136 | def pop_data(self, key): raise NoUseException() 137 | def has_data_for_key(self, key): raise NoUseException() 138 | def put_if_empty(self, key, value): raise NoUseException() 139 | class NoUseHuey(Huey): 140 | def get_storage(self, **storage_kwargs): 141 | return NoUseStorage() 142 | 143 | 144 | class TestImmediateMemoryStorage(BaseTestCase): 145 | def get_huey(self): 146 | return NoUseHuey(utc=False) 147 | 148 | def test_immediate_storage(self): 149 | @self.huey.task() 150 | def task_a(n): 151 | return n + 1 152 | 153 | self.huey.immediate = True 154 | 155 | # If any operation happens to touch the "real" storage engine, an 156 | # exception will be raised. These tests validate that immediate mode 157 | # doesn't accidentally interact with the live storage. 
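        # (immediate_use_memory defaults to True, so immediate mode swaps in
        # an in-memory storage rather than touching NoUseStorage.)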
158 | res = task_a(2) 159 | self.assertEqual(res(), 3) 160 | 161 | task_a.revoke() 162 | res = task_a(3) 163 | self.assertTrue(res() is None) 164 | self.assertTrue(task_a.restore()) 165 | 166 | res = task_a(4) 167 | self.assertEqual(res(), 5) 168 | 169 | eta = datetime.datetime.now() + datetime.timedelta(seconds=60) 170 | res = task_a.schedule((5,), eta=eta) 171 | self.assertTrue(res() is None) 172 | 173 | minus_1 = eta - datetime.timedelta(seconds=1) 174 | self.assertEqual(self.huey.read_schedule(minus_1), []) 175 | 176 | tasks = self.huey.read_schedule(eta) 177 | self.assertEqual([t.id for t in tasks], [res.id]) 178 | self.assertTrue(res() is None) 179 | 180 | # Switch back to regular storage / non-immediate mode. 181 | self.huey.immediate = False 182 | self.assertRaises(NoUseException, task_a, 1) 183 | 184 | # Switch back to immediate mode. 185 | self.huey.immediate = True 186 | res = task_a(10) 187 | self.assertEqual(res(), 11) 188 | 189 | def test_immediate_real_storage(self): 190 | self.huey.immediate_use_memory = False 191 | 192 | @self.huey.task() 193 | def task_a(n): 194 | return n + 1 195 | 196 | self.huey.immediate = True 197 | self.assertRaises(NoUseException, task_a, 1) 198 | 199 | self.huey.immediate = False 200 | self.assertRaises(NoUseException, task_a, 2) 201 | -------------------------------------------------------------------------------- /huey/tests/test_kt_huey.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess as sp 3 | import unittest 4 | 5 | try: 6 | import ukt 7 | except ImportError: 8 | ukt = None 9 | 10 | try: 11 | from huey.contrib.kyototycoon import KyotoTycoonHuey 12 | from huey.contrib.kyototycoon import KyotoTycoonStorage 13 | except ImportError: 14 | if ukt is not None: 15 | raise 16 | 17 | from huey.tests.base import BaseTestCase 18 | from huey.tests.test_storage import StorageTests 19 | 20 | has_ktserver = sp.call(['which', 'ktserver'], stdout=sp.PIPE) == 0 21 | 22 | 23 | @unittest.skipIf(ukt is None, 'requires ukt') 24 | @unittest.skipIf(not has_ktserver, 'kyototycoon server not installed') 25 | class TestKyotoTycoonHuey(StorageTests, BaseTestCase): 26 | @classmethod 27 | def setUpClass(cls): 28 | lua_path = os.path.join(os.path.dirname(__file__), 'scripts/') 29 | lua_script = os.path.join(lua_path, 'kt.lua') 30 | cls._server = ukt.EmbeddedServer(database='%', serializer=ukt.KT_NONE, 31 | server_args=['-scr', lua_script]) 32 | cls._server.run() 33 | cls.db = cls._server.client 34 | 35 | @classmethod 36 | def tearDownClass(cls): 37 | if cls._server is not None: 38 | cls._server.stop() 39 | cls.db.close_all() 40 | cls.db = None 41 | 42 | def tearDown(self): 43 | if self.db is not None: 44 | self.db.clear() 45 | 46 | def get_huey(self): 47 | return KyotoTycoonHuey(client=self.db, utc=False) 48 | 49 | def test_expire_results(self): 50 | huey = KyotoTycoonHuey(client=self.db, utc=False, 51 | result_expire_time=3600) 52 | s = huey.storage 53 | 54 | s.put_data(b'k1', b'v1') 55 | s.put_data(b'k2', b'v2', is_result=True) 56 | self.assertEqual(s.pop_data(b'k1'), b'v1') 57 | self.assertEqual(s.pop_data(b'k2'), b'v2') 58 | 59 | self.assertTrue(s.has_data_for_key(b'k2')) 60 | self.assertFalse(s.put_if_empty(b'k2', b'v2-x')) 61 | self.assertFalse(s.has_data_for_key(b'k3')) 62 | self.assertTrue(s.put_if_empty(b'k3', b'v3')) 63 | 64 | self.assertTrue(s.delete_data(b'k2')) 65 | self.assertFalse(s.delete_data(b'k2')) 66 | self.assertEqual(s.result_items(), {'k1': b'v1', 'k3': b'v3'}) 67 | 
self.assertEqual(s.result_store_size(), 2) 68 | -------------------------------------------------------------------------------- /huey/tests/test_priority.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from huey.api import MemoryHuey 4 | from huey.api import crontab 5 | from huey.exceptions import TaskException 6 | from huey.tests.base import BaseTestCase 7 | 8 | 9 | class TestPriority(BaseTestCase): 10 | def setUp(self): 11 | super(TestPriority, self).setUp() 12 | 13 | self.state = [] 14 | 15 | def task(n): 16 | self.state.append(n) 17 | return n 18 | 19 | self.task_1 = self.huey.task(priority=1, name='task_1')(task) 20 | self.task_2 = self.huey.task(priority=2, name='task_2')(task) 21 | self.task_0 = self.huey.task(name='task_0')(task) 22 | 23 | def tearDown(self): 24 | super(TestPriority, self).tearDown() 25 | self.task_1 = self.task_2 = self.task_0 = None 26 | 27 | def test_priority_simple(self): 28 | self.task_0(0) 29 | self.task_1(10) 30 | self.task_2(100) 31 | 32 | self.task_0(2) 33 | self.task_1(12) 34 | self.task_2(120) 35 | self.assertEqual(len(self.huey), 6) 36 | 37 | # First the task_2 invocations, then the task_1, then the task_0. 38 | results = [100, 120, 10, 12, 0, 2] 39 | for result in results: 40 | self.assertEqual(self.execute_next(), result) 41 | 42 | self.assertEqual(len(self.huey), 0) 43 | self.assertEqual(self.state, results) 44 | 45 | def test_priority_override(self): 46 | r0_0 = self.task_0(0) 47 | r1_0 = self.task_1(10) 48 | r2_0 = self.task_2(100) 49 | 50 | r0_1 = self.task_0(1, priority=2) 51 | r1_1 = self.task_1(11, priority=0) 52 | r2_1 = self.task_2(110, priority=1) 53 | 54 | r0_2 = self.task_0(2, priority=1) 55 | r1_2 = self.task_1(12, priority=2) 56 | r2_2 = self.task_2(120, priority=0) 57 | 58 | results = [100, 1, 12, 10, 110, 2, 0, 11, 120] 59 | for result in results: 60 | self.assertEqual(self.execute_next(), result) 61 | 62 | self.assertEqual(len(self.huey), 0) 63 | self.assertEqual(self.state, results) 64 | 65 | r0_3 = self.task_0(3) 66 | r1_3 = self.task_1(13) 67 | r2_3 = self.task_2(130) 68 | rx = self.task_0(9, priority=9) 69 | results.extend((9, 130, 13, 3)) 70 | for result in results[-4:]: 71 | self.assertEqual(self.execute_next(), result) 72 | 73 | self.assertEqual(len(self.huey), 0) 74 | self.assertEqual(self.state, results) 75 | 76 | def test_schedule_priority(self): 77 | eta = datetime.datetime.now() + datetime.timedelta(seconds=60) 78 | r0_0 = self.task_0.schedule((0,), eta=eta) 79 | r1_0 = self.task_1.schedule((10,), eta=eta) 80 | r2_0 = self.task_2.schedule((100,), eta=eta) 81 | 82 | r0_1 = self.task_0.schedule((1,), eta=eta, priority=2) 83 | r1_1 = self.task_1.schedule((11,), eta=eta, priority=0) 84 | r2_1 = self.task_2.schedule((110,), eta=eta, priority=1) 85 | 86 | expected = { 87 | r0_0.id: None, 88 | r1_0.id: 1, 89 | r2_0.id: 2, 90 | r0_1.id: 2, 91 | r1_1.id: 0, 92 | r2_1.id: 1} 93 | 94 | for _ in range(6): 95 | self.assertTrue(self.execute_next() is None) 96 | 97 | # Priorities are preserved when added to the schedule. 98 | priorities = dict((t.id, t.priority) for t in self.huey.scheduled()) 99 | self.assertEqual(priorities, expected) 100 | 101 | # Priorities are preserved when read from the schedule. 
102 | items = self.huey.read_schedule(timestamp=eta) 103 | priorities = dict((t.id, t.priority) for t in items) 104 | self.assertEqual(priorities, expected) 105 | 106 | def test_periodic_priority(self): 107 | @self.huey.periodic_task(crontab(), priority=3, name='ptask') 108 | def task_p(): 109 | pass 110 | 111 | self.task_0(0) 112 | self.task_1(10) 113 | self.task_2(100) 114 | 115 | for task in self.huey.read_periodic(datetime.datetime.now()): 116 | self.huey.enqueue(task) 117 | 118 | # Our periodic task has a higher priority than the other tasks in the 119 | # queue, and will be executed first. 120 | self.assertEqual(len(self.huey), 4) 121 | ptask = self.huey.dequeue() 122 | self.assertEqual(ptask.name, 'ptask') # Verify is our periodic task. 123 | self.assertEqual(ptask.priority, 3) # Priority is preserved. 124 | 125 | def test_priority_retry(self): 126 | @self.huey.task(priority=3, retries=1) 127 | def task_3(n): 128 | raise ValueError('uh-oh') 129 | 130 | self.task_0(0) 131 | self.task_1(10) 132 | r2 = self.task_2(100) 133 | r3 = task_3(3) 134 | 135 | self.assertEqual(len(self.huey), 4) 136 | task = self.huey.dequeue() 137 | self.assertEqual(task.id, r3.id) 138 | self.assertEqual(task.priority, 3) 139 | self.assertEqual(task.retries, 1) 140 | self.assertTrue(self.huey.execute(task) is None) 141 | self.assertRaises(TaskException, r3.get) 142 | 143 | # Task has been re-enqueued for retry. Verify priority is preserved. 144 | self.assertEqual(len(self.huey), 4) 145 | rtask = self.huey.dequeue() 146 | self.assertEqual(rtask.id, r3.id) 147 | self.assertEqual(rtask.priority, 3) 148 | self.assertEqual(rtask.retries, 0) 149 | self.assertTrue(self.huey.execute(rtask) is None) 150 | 151 | # No more retries, now we'll get our task_2. 152 | self.assertEqual(len(self.huey), 3) 153 | task = self.huey.dequeue() 154 | self.assertEqual(task.id, r2.id) 155 | -------------------------------------------------------------------------------- /huey/tests/test_registry.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | 3 | from huey.exceptions import HueyException 4 | from huey.tests.base import BaseTestCase 5 | 6 | 7 | class TestRegistry(BaseTestCase): 8 | def setUp(self): 9 | super(TestRegistry, self).setUp() 10 | self.registry = self.huey._registry 11 | 12 | def test_register_unique(self): 13 | def task_a(): pass 14 | def task_b(): pass 15 | 16 | ta = self.huey.task()(task_a) 17 | self.assertRaises(ValueError, self.huey.task(), task_a) 18 | self.assertRaises(ValueError, self.huey.task(name='task_a'), task_b) 19 | 20 | # We can register task_b and re-register task_a providing a new name. 
21 | tb = self.huey.task()(task_b) 22 | ta2 = self.huey.task(name='task_a2')(task_a) 23 | 24 | t1 = ta.s() 25 | t2 = ta2.s() 26 | self.assertTrue(t1.name != t2.name) 27 | 28 | def test_register_unregister(self): 29 | @self.huey.task() 30 | def task_a(): 31 | pass 32 | 33 | self.assertTrue(task_a.unregister()) 34 | self.assertFalse(task_a.unregister()) 35 | 36 | def test_message_wrapping(self): 37 | @self.huey.task(retries=1) 38 | def task_a(p1, p2, p3=3, p4=None): 39 | pass 40 | 41 | task = task_a.s('v1', 'v2', p4='v4') 42 | message = self.registry.create_message(task) 43 | self.assertEqual(message.id, task.id) 44 | self.assertEqual(message.retries, 1) 45 | self.assertEqual(message.retry_delay, 0) 46 | self.assertEqual(message.args, ('v1', 'v2')) 47 | self.assertEqual(message.kwargs, {'p4': 'v4'}) 48 | self.assertTrue(message.on_complete is None) 49 | self.assertTrue(message.on_error is None) 50 | self.assertTrue(message.expires is None) 51 | self.assertTrue(message.expires_resolved is None) 52 | 53 | task2 = self.registry.create_task(message) 54 | self.assertEqual(task2.id, task.id) 55 | self.assertEqual(task2.retries, 1) 56 | self.assertEqual(task2.retry_delay, 0) 57 | self.assertEqual(task2.args, ('v1', 'v2')) 58 | self.assertEqual(task2.kwargs, {'p4': 'v4'}) 59 | self.assertTrue(task2.on_complete is None) 60 | self.assertTrue(task2.on_error is None) 61 | self.assertTrue(task2.expires is None) 62 | self.assertTrue(task2.expires_resolved is None) 63 | 64 | def test_missing_task(self): 65 | @self.huey.task() 66 | def task_a(): 67 | pass 68 | 69 | # Serialize the task invocation. 70 | task = task_a.s() 71 | message = self.registry.create_message(task) 72 | 73 | # Unregister the task, which will raise an error when we try to 74 | # deserialize the message back into a task instance. 75 | self.assertTrue(task_a.unregister()) 76 | self.assertRaises(HueyException, self.registry.create_task, message) 77 | 78 | # Similarly, we can no longer serialize the task to a message. 79 | self.assertRaises(HueyException, self.registry.create_message, task) 80 | 81 | def test_periodic_tasks(self): 82 | def task_fn(): pass 83 | self.huey.task(name='a')(task_fn) 84 | p1 = self.huey.periodic_task(lambda _: False, name='p1')(task_fn) 85 | p2 = self.huey.periodic_task(lambda _: False, name='p2')(task_fn) 86 | self.huey.task(name='b')(task_fn) 87 | 88 | periodic = sorted(t.name for t in self.registry.periodic_tasks) 89 | self.assertEqual(periodic, ['p1', 'p2']) 90 | 91 | self.assertTrue(p1.unregister()) 92 | periodic = sorted(t.name for t in self.registry.periodic_tasks) 93 | self.assertEqual(periodic, ['p2']) 94 | 95 | def test_huey1_compat(self): 96 | @self.huey.task() 97 | def task_a(n): 98 | return n + 1 99 | 100 | t = task_a.s(2) 101 | 102 | # Enqueue a message using the old message serialization format. 
103 | tc = task_a.task_class 104 | old_message = (t.id, '%s.%s' % (tc.__module__, tc.__name__), None, 0, 105 | 0, ((2,), {}), None) 106 | self.huey.storage.enqueue(pickle.dumps(old_message)) 107 | 108 | self.assertEqual(len(self.huey), 1) 109 | self.assertEqual(self.execute_next(), 3) 110 | self.assertEqual(self.huey.result(t.id), 3) 111 | -------------------------------------------------------------------------------- /huey/tests/test_serializer.py: -------------------------------------------------------------------------------- 1 | try: 2 | import gzip 3 | except ImportError: 4 | gzip = None 5 | import unittest 6 | try: 7 | import zlib 8 | except ImportError: 9 | zlib = None 10 | 11 | from huey.serializer import Serializer 12 | from huey.tests.base import BaseTestCase 13 | 14 | 15 | class TestSerializer(BaseTestCase): 16 | data = [ 17 | None, 18 | 0, 1, 19 | b'a' * 1024, 20 | ['k1', 'k2', 'k3'], 21 | {'k1': 'v1', 'k2': 'v2', 'k3': 'v3'}] 22 | 23 | def _test_serializer(self, s): 24 | for item in self.data: 25 | self.assertEqual(s.deserialize(s.serialize(item)), item) 26 | 27 | def test_serializer(self): 28 | self._test_serializer(Serializer()) 29 | 30 | @unittest.skipIf(gzip is None, 'gzip module not installed') 31 | def test_serializer_gzip(self): 32 | self._test_serializer(Serializer(compression=True)) 33 | 34 | @unittest.skipIf(zlib is None, 'zlib module not installed') 35 | def test_serializer_zlib(self): 36 | self._test_serializer(Serializer(compression=True, use_zlib=True)) 37 | 38 | @unittest.skipIf(zlib is None, 'zlib module not installed') 39 | @unittest.skipIf(gzip is None, 'gzip module not installed') 40 | def test_mismatched_compression(self): 41 | for use_zlib in (False, True): 42 | s = Serializer() 43 | scomp = Serializer(compression=True, use_zlib=use_zlib) 44 | for item in self.data: 45 | self.assertEqual(scomp.deserialize(s.serialize(item)), item) 46 | -------------------------------------------------------------------------------- /huey/tests/test_signals.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from huey.signals import * 4 | from huey.tests.base import BaseTestCase 5 | 6 | 7 | class TestSignals(BaseTestCase): 8 | def setUp(self): 9 | super(TestSignals, self).setUp() 10 | self._state = [] 11 | 12 | @self.huey.signal() 13 | def signal_handle(signal, task, *args): 14 | self._state.append((signal, task, args)) 15 | 16 | def assertSignals(self, expected): 17 | self.assertEqual([s[0] for s in self._state], expected) 18 | self._state = [] 19 | 20 | def test_signals_simple(self): 21 | @self.huey.task() 22 | def task_a(n): 23 | return n + 1 24 | 25 | r = task_a(3) 26 | self.assertSignals([SIGNAL_ENQUEUED]) 27 | self.assertEqual(self.execute_next(), 4) 28 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_COMPLETE]) 29 | 30 | r = task_a.schedule((2,), delay=60) 31 | self.assertSignals([SIGNAL_ENQUEUED]) 32 | self.assertTrue(self.execute_next() is None) 33 | self.assertSignals([SIGNAL_SCHEDULED]) 34 | 35 | r = task_a(None) 36 | self.assertSignals([SIGNAL_ENQUEUED]) 37 | self.assertTrue(self.execute_next() is None) 38 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_ERROR]) 39 | 40 | def test_signal_complete_result_ready(self): 41 | @self.huey.task() 42 | def task_a(n): 43 | return n + 1 44 | 45 | results = [] 46 | 47 | @self.huey.signal(SIGNAL_COMPLETE) 48 | def on_complete(sig, task, *_): 49 | results.append(self.huey.result(task.id)) 50 | 51 | r = task_a(2) 52 | self.assertEqual(self.execute_next(), 3) 53 | 
self.assertEqual(results, [3]) 54 | 55 | def test_signals_on_retry(self): 56 | @self.huey.task(retries=1) 57 | def task_a(n): 58 | return n + 1 59 | 60 | r = task_a(None) 61 | self.assertSignals([SIGNAL_ENQUEUED]) 62 | self.assertTrue(self.execute_next() is None) 63 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_ERROR, SIGNAL_RETRYING, 64 | SIGNAL_ENQUEUED]) 65 | self.assertTrue(self.execute_next() is None) 66 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_ERROR]) 67 | 68 | @self.huey.task(retries=1, retry_delay=60) 69 | def task_b(n): 70 | return n + 1 71 | 72 | r = task_b(None) 73 | self.assertSignals([SIGNAL_ENQUEUED]) 74 | self.assertTrue(self.execute_next() is None) 75 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_ERROR, SIGNAL_RETRYING, 76 | SIGNAL_SCHEDULED]) 77 | 78 | def test_signals_revoked(self): 79 | @self.huey.task() 80 | def task_a(n): 81 | return n + 1 82 | 83 | task_a.revoke(revoke_once=True) 84 | r = task_a(2) 85 | self.assertSignals([SIGNAL_ENQUEUED]) 86 | self.assertTrue(self.execute_next() is None) 87 | self.assertSignals([SIGNAL_REVOKED]) 88 | 89 | r = task_a(3) 90 | self.assertEqual(self.execute_next(), 4) 91 | self.assertSignals([SIGNAL_ENQUEUED, SIGNAL_EXECUTING, 92 | SIGNAL_COMPLETE]) 93 | 94 | def test_signals_locked(self): 95 | @self.huey.task() 96 | @self.huey.lock_task('lock-a') 97 | def task_a(n): 98 | return n + 1 99 | 100 | r = task_a(1) 101 | self.assertSignals([SIGNAL_ENQUEUED]) 102 | self.assertEqual(self.execute_next(), 2) 103 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_COMPLETE]) 104 | 105 | with self.huey.lock_task('lock-a'): 106 | r = task_a(2) 107 | self.assertSignals([SIGNAL_ENQUEUED]) 108 | self.assertTrue(self.execute_next() is None) 109 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_LOCKED]) 110 | 111 | def test_signal_expired(self): 112 | @self.huey.task(expires=10) 113 | def task_a(n): 114 | return n + 1 115 | 116 | now = datetime.datetime.now() 117 | expires = now + datetime.timedelta(seconds=15) 118 | r = task_a(2) 119 | self.assertSignals([SIGNAL_ENQUEUED]) 120 | self.assertTrue(self.execute_next(expires) is None) 121 | self.assertSignals([SIGNAL_EXPIRED]) 122 | 123 | r = task_a(3) 124 | self.assertSignals([SIGNAL_ENQUEUED]) 125 | self.assertTrue(self.execute_next(), 4) 126 | self.assertSignals([SIGNAL_EXECUTING, SIGNAL_COMPLETE]) 127 | 128 | def test_specific_handler(self): 129 | extra_state = [] 130 | 131 | @self.huey.signal(SIGNAL_EXECUTING) 132 | def extra_handler(signal, task): 133 | extra_state.append(task.args[0]) 134 | 135 | @self.huey.task() 136 | def task_a(n): 137 | return n + 1 138 | 139 | r = task_a(3) 140 | self.assertEqual(extra_state, []) 141 | self.assertEqual(self.execute_next(), 4) 142 | self.assertEqual(extra_state, [3]) 143 | self.assertSignals([SIGNAL_ENQUEUED, SIGNAL_EXECUTING, 144 | SIGNAL_COMPLETE]) 145 | 146 | r2 = task_a(1) 147 | self.assertEqual(self.execute_next(), 2) 148 | self.assertEqual(extra_state, [3, 1]) 149 | self.assertSignals([SIGNAL_ENQUEUED, SIGNAL_EXECUTING, 150 | SIGNAL_COMPLETE]) 151 | 152 | self.huey.disconnect_signal(extra_handler, SIGNAL_EXECUTING) 153 | r3 = task_a(2) 154 | self.assertEqual(self.execute_next(), 3) 155 | self.assertEqual(extra_state, [3, 1]) 156 | self.assertSignals([SIGNAL_ENQUEUED, SIGNAL_EXECUTING, 157 | SIGNAL_COMPLETE]) 158 | 159 | def test_multi_handlers(self): 160 | state1 = [] 161 | state2 = [] 162 | 163 | @self.huey.signal(SIGNAL_EXECUTING, SIGNAL_COMPLETE) 164 | def handler1(signal, task): 165 | state1.append(signal) 166 | 167 | @self.huey.signal(SIGNAL_EXECUTING, 
SIGNAL_COMPLETE) 168 | def handler2(signal, task): 169 | state2.append(signal) 170 | 171 | @self.huey.task() 172 | def task_a(n): 173 | return n + 1 174 | 175 | r = task_a(1) 176 | self.assertEqual(self.execute_next(), 2) 177 | self.assertEqual(state1, ['executing', 'complete']) 178 | self.assertEqual(state2, ['executing', 'complete']) 179 | 180 | self.huey.disconnect_signal(handler1, SIGNAL_COMPLETE) 181 | self.huey.disconnect_signal(handler2) 182 | 183 | r2 = task_a(2) 184 | self.assertEqual(self.execute_next(), 3) 185 | self.assertEqual(state1, ['executing', 'complete', 'executing']) 186 | self.assertEqual(state2, ['executing', 'complete']) 187 | -------------------------------------------------------------------------------- /huey/tests/test_sql_huey.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | try: 5 | import peewee 6 | except ImportError: 7 | peewee = None 8 | 9 | try: 10 | from huey.contrib.sql_huey import SqlHuey 11 | from huey.contrib.sql_huey import SqlStorage 12 | except ImportError: 13 | if peewee is not None: 14 | raise 15 | from huey.tests.base import BaseTestCase 16 | from huey.tests.test_storage import StorageTests 17 | 18 | 19 | SQLHUEY_URL = os.environ.get('SQLHUEY_URL') or 'sqlite:////tmp/huey-sqlite.db' 20 | 21 | 22 | @unittest.skipIf(peewee is None, 'requires peewee') 23 | class TestSqlStorage(StorageTests, BaseTestCase): 24 | db_file = '/tmp/huey-sqlite.db' 25 | 26 | def setUp(self): 27 | if os.path.exists(self.db_file): 28 | os.unlink(self.db_file) 29 | super(TestSqlStorage, self).setUp() 30 | 31 | def tearDown(self): 32 | super(TestSqlStorage, self).tearDown() 33 | self.huey.storage.drop_tables() 34 | 35 | @classmethod 36 | def tearDownClass(cls): 37 | super(TestSqlStorage, cls).tearDownClass() 38 | if os.path.exists(cls.db_file): 39 | os.unlink(cls.db_file) 40 | 41 | def get_huey(self): 42 | return SqlHuey(database=SQLHUEY_URL, utc=False) 43 | 44 | def test_sql_huey_basic(self): 45 | @self.huey.task() 46 | def task_a(n): 47 | return n + 1 48 | 49 | r1 = task_a(1) 50 | r2 = task_a(2) 51 | self.assertEqual(self.execute_next(), 2) 52 | self.assertEqual(len(self.huey), 1) 53 | self.assertEqual(self.huey.result_count(), 1) 54 | r2.revoke() 55 | self.assertEqual(self.huey.result_count(), 2) 56 | 57 | self.assertTrue(self.execute_next() is None) 58 | self.assertEqual(len(self.huey), 0) 59 | self.assertEqual(self.huey.result_count(), 1) 60 | 61 | r3 = task_a.schedule((3,), delay=10) 62 | self.assertEqual(len(self.huey), 1) 63 | self.assertTrue(self.execute_next() is None) 64 | self.assertEqual(self.huey.scheduled_count(), 1) 65 | self.assertEqual(len(self.huey), 0) 66 | self.assertEqual(self.huey.result_count(), 1) 67 | 68 | tasks = self.huey.read_schedule(r3.task.eta) 69 | self.assertEqual(len(tasks), 1) 70 | self.assertEqual(tasks[0].id, r3.id) 71 | 72 | def test_sql_huey_priority(self): 73 | @self.huey.task() 74 | def task_a(n): 75 | return n 76 | 77 | @self.huey.task(priority=1) 78 | def task_b(n): 79 | return n * 10 80 | 81 | task_a(1) 82 | task_b(2) 83 | task_a(3, priority=2) 84 | task_b(4, priority=2) 85 | task_a(5, priority=1) 86 | task_b(6, priority=0) 87 | task_a(7) 88 | task_b(8) 89 | 90 | results = [3, 40, 20, 5, 80, 1, 60, 7] 91 | for result in results: 92 | self.assertEqual(self.execute_next(), result) 93 | 94 | self.assertEqual(len(self.huey), 0) 95 | -------------------------------------------------------------------------------- /huey/tests/test_utils.py: 
-------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import time 4 | import unittest 5 | 6 | from huey.utils import UTC 7 | from huey.utils import normalize_time 8 | from huey.utils import reraise_as 9 | 10 | 11 | class MyException(Exception): pass 12 | 13 | 14 | class TestReraiseAs(unittest.TestCase): 15 | def test_wrap_exception(self): 16 | def raise_keyerror(): 17 | try: 18 | {}['huey'] 19 | except KeyError as exc: 20 | reraise_as(MyException) 21 | 22 | self.assertRaises(MyException, raise_keyerror) 23 | try: 24 | raise_keyerror() 25 | except MyException as exc: 26 | self.assertEqual(str(exc), "KeyError: 'huey'") 27 | else: 28 | raise AssertionError('MyException not raised as expected.') 29 | 30 | 31 | class FakePacific(datetime.tzinfo): 32 | def utcoffset(self, dt): 33 | return datetime.timedelta(hours=-8) 34 | def tzname(self, dt): 35 | return 'US/Pacific' 36 | def dst(self, dt): 37 | return datetime.timedelta(0) 38 | 39 | 40 | class TestNormalizeTime(unittest.TestCase): 41 | def setUp(self): 42 | self._orig_tz = os.environ.get('TZ') 43 | os.environ['TZ'] = 'US/Pacific' 44 | time.tzset() 45 | 46 | def tearDown(self): 47 | del os.environ['TZ'] 48 | if self._orig_tz: 49 | os.environ['TZ'] = self._orig_tz 50 | time.tzset() 51 | 52 | def test_normalize_time(self): 53 | ts_local = datetime.datetime(2000, 1, 1, 12, 0, 0) # Noon on Jan 1. 54 | ts_utc = ts_local + datetime.timedelta(hours=8) # For fake tz. 55 | ts_inv = ts_local - datetime.timedelta(hours=8) 56 | 57 | # Naive datetime. 58 | 59 | # No conversion is applied, as we treat everything as local time. 60 | self.assertEqual(normalize_time(ts_local, utc=False), ts_local) 61 | 62 | # So we provided a naive timestamp from the localtime (us/pacific), 63 | # which is 8 hours behind UTC in January. 64 | self.assertEqual(normalize_time(ts_local, utc=True), ts_utc) 65 | 66 | # TZ-aware datetime in local timezone (Fake US/Pacific). 67 | 68 | # Here we provide a tz-aware timestamp from the localtime (us/pacific). 69 | ts = datetime.datetime(2000, 1, 1, 12, 0, 0, tzinfo=FakePacific()) 70 | 71 | # No conversion, treated as local time. 72 | self.assertEqual(normalize_time(ts, utc=False), ts_local) 73 | 74 | # Converted to UTC according to rules from our fake tzinfo, +8 hours. 75 | self.assertEqual(normalize_time(ts, utc=True), ts_utc) 76 | 77 | # TZ-aware datetime in UTC timezone. 78 | 79 | # Here we provide a tz-aware timestamp using UTC timezone. 80 | ts = datetime.datetime(2000, 1, 1, 12, 0, 0, tzinfo=UTC()) 81 | 82 | # Since we're specifying utc=False, we are dealing with localtimes 83 | # internally. The timestamp passed in is a tz-aware timestamp in UTC. 84 | # To convert to a naive localtime, we subtract 8 hours (since UTC is 85 | # 8 hours ahead of our local time). 86 | self.assertEqual(normalize_time(ts, utc=False), ts_inv) 87 | 88 | # When utc=True there's no change, since the timestamp is already UTC. 
89 | self.assertEqual(normalize_time(ts, utc=True), ts_local) 90 | -------------------------------------------------------------------------------- /huey/tests/test_wrappers.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | 3 | from huey import api 4 | from huey import storage 5 | from huey.tests.base import BaseTestCase 6 | 7 | 8 | class TestWrappers(BaseTestCase): 9 | def test_wrappers(self): 10 | wrappers = { 11 | api.BlackHoleHuey: storage.BlackHoleStorage, 12 | api.MemoryHuey: storage.MemoryStorage, 13 | api.RedisExpireHuey: storage.RedisExpireStorage, 14 | api.RedisHuey: storage.RedisStorage, 15 | api.SqliteHuey: storage.SqliteStorage, 16 | } 17 | for huey_wrapper, storage_class in wrappers.items(): 18 | h = huey_wrapper('testhuey') 19 | self.assertEqual(h.name, 'testhuey') 20 | self.assertEqual(h.storage.name, 'testhuey') 21 | self.assertTrue(isinstance(h.storage, storage_class)) 22 | 23 | def test_fake_wrapper(self): 24 | # This is kind-of a silly test, as we're essentially just testing 25 | # functools.partial(), but let's just make sure that parameters are 26 | # getting passed to the storage correctly - and that the storage is 27 | # initialized correctly. 28 | class BogusStorage(storage.BlackHoleStorage): 29 | def __init__(self, name, host='127.0.0.1', port=None, db=None): 30 | super(BogusStorage, self).__init__(name) 31 | self.host = host 32 | self.port = port 33 | self.db = db 34 | 35 | BH = partial(api.Huey, storage_class=BogusStorage, port=1337) 36 | 37 | bh = BH('test') 38 | self.assertEqual(bh.name, 'test') 39 | self.assertEqual(bh.storage.name, 'test') 40 | self.assertTrue(isinstance(bh.storage, BogusStorage)) 41 | self.assertEqual(bh.storage.host, '127.0.0.1') 42 | self.assertEqual(bh.storage.port, 1337) 43 | self.assertTrue(bh.storage.db is None) 44 | 45 | bh2 = BH('test2', host='localhost', port=31337, db=15) 46 | self.assertEqual(bh2.storage.host, 'localhost') 47 | self.assertEqual(bh2.storage.port, 31337) 48 | self.assertEqual(bh2.storage.db, 15) 49 | -------------------------------------------------------------------------------- /huey/utils.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | import calendar 3 | import datetime 4 | import errno 5 | import os 6 | import sys 7 | import time 8 | import warnings 9 | try: 10 | import fcntl 11 | except ImportError: 12 | fcntl = None 13 | 14 | if sys.version_info < (3, 12): 15 | utcnow = datetime.datetime.utcnow 16 | else: 17 | def utcnow(): 18 | return (datetime.datetime 19 | .now(datetime.timezone.utc) 20 | .replace(tzinfo=None)) 21 | 22 | 23 | Error = namedtuple('Error', ('metadata',)) 24 | 25 | 26 | class UTC(datetime.tzinfo): 27 | zero = datetime.timedelta(0) 28 | 29 | def __repr__(self): 30 | return "<UTC>" 31 | def utcoffset(self, dt): 32 | return self.zero 33 | def tzname(self, dt): 34 | return "UTC" 35 | def dst(self, dt): 36 | return self.zero 37 | _UTC = UTC() 38 | 39 | 40 | def load_class(s): 41 | path, klass = s.rsplit('.', 1) 42 | __import__(path) 43 | mod = sys.modules[path] 44 | return getattr(mod, klass) 45 | 46 | 47 | def reraise_as(new_exc_class): 48 | exc_class, exc, tb = sys.exc_info() 49 | raise new_exc_class('%s: %s' % (exc_class.__name__, exc)) 50 | 51 | 52 | def is_naive(dt): 53 | """ 54 | Determines if a given datetime.datetime is naive.
55 | The concept is defined in Python's docs: 56 | http://docs.python.org/library/datetime.html#datetime.tzinfo 57 | Assuming value.tzinfo is either None or a proper datetime.tzinfo, 58 | value.utcoffset() implements the appropriate logic. 59 | """ 60 | return dt.utcoffset() is None 61 | 62 | 63 | def make_naive(dt): 64 | """ 65 | Makes an aware datetime.datetime naive in local time zone. 66 | """ 67 | tt = dt.utctimetuple() 68 | ts = calendar.timegm(tt) 69 | local_tt = time.localtime(ts) 70 | return datetime.datetime(*local_tt[:6]) 71 | 72 | 73 | def aware_to_utc(dt): 74 | """ 75 | Converts an aware datetime.datetime in UTC time zone. 76 | """ 77 | return dt.astimezone(_UTC).replace(tzinfo=None) 78 | 79 | 80 | def local_to_utc(dt): 81 | """ 82 | Converts a naive local datetime.datetime in UTC time zone. 83 | """ 84 | return datetime.datetime(*time.gmtime(time.mktime(dt.timetuple()))[:6]) 85 | 86 | 87 | def normalize_expire_time(expires, utc=True): 88 | if isinstance(expires, datetime.datetime): 89 | return normalize_time(eta=expires, utc=utc) 90 | return normalize_time(delay=expires, utc=utc) 91 | 92 | 93 | def normalize_time(eta=None, delay=None, utc=True): 94 | if not ((delay is None) ^ (eta is None)): 95 | raise ValueError('Specify either an eta (datetime) or delay (seconds)') 96 | elif delay: 97 | method = (utc and utcnow or 98 | datetime.datetime.now) 99 | if not isinstance(delay, datetime.timedelta): 100 | delay = datetime.timedelta(seconds=delay) 101 | return method() + delay 102 | elif eta: 103 | has_tz = not is_naive(eta) 104 | if utc: 105 | if not has_tz: 106 | eta = local_to_utc(eta) 107 | else: 108 | eta = aware_to_utc(eta) 109 | elif has_tz: 110 | # Convert TZ-aware into naive localtime. 111 | eta = make_naive(eta) 112 | return eta 113 | 114 | 115 | if sys.version_info[0] == 2: 116 | string_type = basestring 117 | text_type = unicode 118 | def to_timestamp(dt): 119 | return time.mktime(dt.timetuple()) 120 | else: 121 | string_type = (bytes, str) 122 | text_type = str 123 | def to_timestamp(dt): 124 | return dt.timestamp() 125 | 126 | 127 | def encode(s): 128 | if isinstance(s, bytes): 129 | return s 130 | elif isinstance(s, text_type): 131 | return s.encode('utf8') 132 | elif s is not None: 133 | return text_type(s).encode('utf8') 134 | 135 | 136 | def decode(s): 137 | if isinstance(s, text_type): 138 | return s 139 | elif isinstance(s, bytes): 140 | return s.decode('utf8') 141 | elif s is not None: 142 | return text_type(s) 143 | 144 | 145 | class FileLock(object): 146 | def __init__(self, filename): 147 | if fcntl is None: 148 | warnings.warn('FileLock not supported on this platform. 
Please ' 149 | 'use a different storage implementation.') 150 | self.filename = filename 151 | self.fd = None 152 | 153 | dirname = os.path.dirname(filename) 154 | if not os.path.exists(dirname): 155 | os.makedirs(dirname) 156 | elif os.path.exists(self.filename): 157 | os.unlink(self.filename) 158 | 159 | def acquire(self): 160 | flags = os.O_CREAT | os.O_TRUNC | os.O_RDWR 161 | self.fd = os.open(self.filename, flags) 162 | if fcntl is not None: 163 | fcntl.flock(self.fd, fcntl.LOCK_EX) 164 | 165 | def release(self): 166 | if self.fd is not None: 167 | fd, self.fd = self.fd, None 168 | if fcntl is not None: 169 | fcntl.flock(fd, fcntl.LOCK_UN) 170 | os.close(fd) 171 | 172 | def __enter__(self): 173 | self.acquire() 174 | return self 175 | 176 | def __exit__(self, exc_type, exc_val, exc_tb): 177 | self.release() 178 | 179 | 180 | if sys.version_info[0] < 3: 181 | time_clock = time.time 182 | else: 183 | time_clock = time.monotonic 184 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel"] 3 | -------------------------------------------------------------------------------- /runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import glob 4 | import optparse 5 | import os 6 | import sys 7 | import unittest 8 | 9 | from huey import tests 10 | 11 | 12 | def collect_tests(args=None): 13 | suite = unittest.TestSuite() 14 | 15 | if not args: 16 | from huey import tests 17 | module_suite = unittest.TestLoader().loadTestsFromModule(tests) 18 | suite.addTest(module_suite) 19 | else: 20 | tmpl = 'huey.tests.test_%s' 21 | cleaned = [tmpl % arg if not arg.startswith('test') else arg 22 | for arg in args] 23 | user_suite = unittest.TestLoader().loadTestsFromNames(cleaned) 24 | suite.addTest(user_suite) 25 | return suite 26 | 27 | 28 | def runtests(suite, verbosity=1, failfast=False): 29 | runner = unittest.TextTestRunner(verbosity=verbosity, failfast=failfast) 30 | results = runner.run(suite) 31 | return results.failures, results.errors 32 | 33 | 34 | if __name__ == '__main__': 35 | parser = optparse.OptionParser() 36 | parser.add_option('-v', '--verbosity', dest='verbosity', default=1, 37 | type='int', help='Verbosity of output') 38 | parser.add_option('-f', '--failfast', action='store_true', default=False, 39 | help='Stop on first failure or error.') 40 | 41 | options, args = parser.parse_args() 42 | suite = collect_tests(args) 43 | failures, errors = runtests(suite, options.verbosity, options.failfast) 44 | for f in glob.glob('huey*.db*'): 45 | os.unlink(f) 46 | 47 | if errors or failures: 48 | sys.exit(1) 49 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup, find_packages 3 | 4 | 5 | with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as fh: 6 | readme = fh.read() 7 | 8 | extras_require = { 9 | 'backends': ['redis>=3.0.0'], 10 | 'redis': ['redis>=3.0.0'], 11 | } 12 | 13 | setup( 14 | name='huey', 15 | version=__import__('huey').__version__, 16 | description='huey, a little task queue', 17 | long_description=readme, 18 | author='Charles Leifer', 19 | author_email='coleifer@gmail.com', 20 | url='http://github.com/coleifer/huey/', 21 | packages=find_packages(), 22 | 
extras_require=extras_require, 23 | package_data={ 24 | 'huey': [ 25 | ], 26 | }, 27 | classifiers=[ 28 | 'Development Status :: 5 - Production/Stable', 29 | 'Intended Audience :: Developers', 30 | 'License :: OSI Approved :: MIT License', 31 | 'Operating System :: OS Independent', 32 | 'Programming Language :: Python', 33 | 'Programming Language :: Python :: 2.7', 34 | 'Programming Language :: Python :: 3.4', 35 | 'Programming Language :: Python :: 3.5', 36 | 'Programming Language :: Python :: 3.6', 37 | 'Programming Language :: Python :: 3.7', 38 | 'Programming Language :: Python :: 3.8', 39 | 'Programming Language :: Python :: 3.9', 40 | 'Programming Language :: Python :: 3.10', 41 | 'Programming Language :: Python :: 3.11', 42 | 'Programming Language :: Python :: 3.12', 43 | #'Programming Language :: Python :: 3.13', 44 | 'Topic :: Software Development :: Libraries :: Python Modules', 45 | ], 46 | test_suite='runtests.collect_tests', 47 | entry_points={ 48 | 'console_scripts': [ 49 | 'huey_consumer = huey.bin.huey_consumer:consumer_main' 50 | ] 51 | }, 52 | scripts=['huey/bin/huey_consumer.py'], 53 | ) 54 | --------------------------------------------------------------------------------