├── .hgignore ├── LICENSE ├── MANIFEST.in ├── README.rst ├── runtests.py ├── setup.py ├── slacker ├── __init__.py ├── adisp │ ├── __init__.py │ └── _adisp.py ├── django_backend │ ├── __init__.py │ ├── conf.py │ ├── urls.py │ └── views.py ├── postpone.py ├── serialization.py ├── tests.py └── workers │ ├── __init__.py │ ├── django.py │ ├── http.py │ └── local.py └── test_project ├── __init__.py ├── settings.py ├── templates ├── 404.html └── 500.html ├── testapp ├── __init__.py ├── models.py └── tests │ ├── __init__.py │ ├── django_tests.py │ └── workers.py └── urls.py /.hgignore: -------------------------------------------------------------------------------- 1 | syntax: glob 2 | 3 | #projects 4 | .settings/* 5 | .project 6 | .pydevproject 7 | .cache/* 8 | nbproject/* 9 | .buildpath 10 | build.properties 11 | MANIFEST.MF 12 | 13 | #temp files 14 | *.pyc 15 | *.pyo 16 | *.orig 17 | *~ 18 | stuff/ 19 | 20 | #os files 21 | .DS_Store 22 | Thumbs.db 23 | 24 | #setup files 25 | build/ 26 | dist/ 27 | MANIFEST 28 | tornado_slacker.egg-info 29 | 30 | #my files 31 | db.sqlite 32 | .coverage 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011 Mikhail Korobov 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 
12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.txt 2 | include *.rst 3 | recursive-include test_project *.html *.py 4 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | tornado-slacker 3 | =============== 4 | 5 | This package provides an easy API for moving the work out of 6 | the tornado process / event loop. 7 | 8 | Currently implemented methods are: 9 | 10 | * execute the code in another server's http hook 11 | (django implementation is included); 12 | * execute the code in a separate thread (thread pool is used); 13 | * dummy immediate execution. 
14 | 15 | API example:: 16 | 17 | from django.contrib.auth.models import User 18 | from slacker import adisp 19 | from slacker import Slacker 20 | from slacker.workers import DjangoWorker 21 | 22 | AsyncUser = Slacker(User, DjangoWorker()) 23 | 24 | @adisp.process 25 | def process_data(): 26 | # all the django ORM is supported; the query will be executed 27 | # on remote end, this will not block the IOLoop 28 | 29 | users = yield AsyncUser.objects.filter(is_staff=True)[:5] 30 | print users 31 | 32 | (pep-342 syntax and adisp library are optional, callback-style code 33 | is also supported) 34 | 35 | 36 | Installation 37 | ============ 38 | 39 | :: 40 | 41 | pip install tornado-slacker 42 | 43 | 44 | Slackers and workers 45 | ==================== 46 | 47 | In order to execute some code in non-blocking manner: 48 | 49 | 1. Create a Slacker (configured with the desired worker) for some python object:: 50 | 51 | from slacker import Slacker 52 | from slacker.workers import ThreadWorker 53 | 54 | class Foo(object): 55 | # ... 56 | 57 | worker = ThreadWorker() 58 | AsyncFoo = Slacker(Foo, worker) 59 | 60 | 2. build a query (you can access attributes, do calls and slicing):: 61 | 62 | query = AsyncFoo('foo').do_blocking_operation(param1, param2)[0] 63 | 64 | 3. execute the query:: 65 | 66 | def callback(result): 67 | # ... 68 | 69 | query.proceed(callback) 70 | 71 | or, using pep-342 style:: 72 | 73 | from slacker import adisp 74 | 75 | @adisp.process 76 | def handler(): 77 | result = yield query 78 | # ... 79 | 80 | Slackers 81 | ======== 82 | 83 | Slackers are special objects that are collecting operations (attribute 84 | access, calls, slicing) without actually executing them:: 85 | 86 | >>> from slacker import Slacker 87 | >>> class Foo(): 88 | ... pass 89 | ... 
90 | >>> FooSlacker = Slacker(Foo) 91 | >>> FooSlacker.hello.world() 92 | __main__.Foo: [('hello',), ('world', (), {})] 93 | 94 | >>> FooSlacker(name='me').hello.world(1, y=3)[:3] 95 | __main__.Foo: [(None, (), {'name': 'me'}), 96 | ('hello',), 97 | ('world', (1,), {'y': 3}), 98 | (slice(None, 3, None), None)] 99 | 100 | Callables arguments must be picklable. Slackers also provide a 101 | method to apply the collected operations to a base object. 102 | 103 | Any picklable object (including top-level functions and classes) can 104 | be wrapped into Slacker, e.g.:: 105 | 106 | from slacker import adisp 107 | from slacker import Slacker 108 | from slacker.workers import ThreadWorker 109 | 110 | def task(param1, param2): 111 | # do something blocking and io-bound 112 | return results 113 | 114 | async_task = Slacker(task, ThreadWorker()) 115 | 116 | # pep-342-style 117 | @adisp.process 118 | def process_data(): 119 | results = yield async_task('foo', 'bar') 120 | print results 121 | 122 | # callback style 123 | def process_data2(): 124 | async_task('foo', 'bar').proceed(on_result) 125 | 126 | def on_result(results): 127 | print results 128 | 129 | 130 | Python modules also can be Slackers:: 131 | 132 | import shutil 133 | from slacker import Slacker 134 | from slacker.workers import ThreadWorker 135 | 136 | shutil_async = Slacker(shutil, ThreadWorker()) 137 | op = shutil_async.copy('file1.txt', 'file2.txt') 138 | op.proceed() 139 | 140 | Workers 141 | ======= 142 | 143 | Workers are classes that decides how and where the work should be done: 144 | 145 | * ``slacker.workers.DummyWorker`` executes code in-place (this 146 | is blocking); 147 | 148 | * ``slacker.workers.ThreadWorker`` executes code in a thread from 149 | a thread pool; 150 | 151 | * ``slacker.workers.HttpWorker`` pickles the slacker, makes an async 152 | http request with this data to a given server hook and expects it 153 | to execute the code and return pickled results; 154 | 155 | .. 
note:: 156 | 157 | IOLoop blocks on any CPU activity and making http requests plus 158 | unpickling the returned result can cause a significant overhead 159 | here. So if the query is fast (e.g. database primary key or index 160 | lookup, say 10ms) then it may be better not to use tornado-slacker 161 | and call the query in 'blocking' way: the overall blocking time 162 | may be less than with 'async' approach because of reduced 163 | computations amount. 164 | 165 | It is also wise to return as little as possible if HttpWorker is used. 166 | 167 | 168 | * ``slacker.workers.DjangoWorker`` is just a HttpWorker with default 169 | values for use with bundled django remote server hook implementation 170 | (``slacker.django_backend``). 171 | 172 | In order to enable django hook, include 'slacker.django_backend.urls' 173 | into urls.py and add SLACKER_SERVER option with server address to 174 | settings.py. 175 | 176 | SLACKER_SERVER is '127.0.0.1:8000' by default so this should work for 177 | development server out of box. 178 | 179 | .. warning:: 180 | 181 | Do not expose django server hook to public, this is insecure! 182 | The best way is to configure additional server instance to listen 183 | some local port (e.g. bind it to the default 127.0.0.1:8000 address). 184 | 185 | .. note:: 186 | 187 | Django's QuerySet arguments like Q, F objects, aggregate and annotate 188 | functions (e.g. Count) are picklable so tornado-slacker can handle 189 | them fine:: 190 | 191 | AsyncAuthor = Slacker(Author, DjangoWorker()) 192 | 193 | # ... 
194 | qs = AsyncAuthor.objects.filter( 195 | Q(name='vasia') | Q(is_great=True) 196 | ).values('name').annotate(average_rating=Avg('book__rating'))[:10] 197 | 198 | authors = yield qs 199 | 200 | Using slacker.Slacker is better than pickling queryset.query 201 | (as advised at http://docs.djangoproject.com/en/dev/ref/models/querysets/#pickling-querysets) 202 | because this allows to pickle any ORM calls including ones that 203 | don't return QuerySets (http://docs.djangoproject.com/en/dev/ref/models/querysets/#methods-that-do-not-return-querysets):: 204 | 205 | yield AsyncUser.objects.create_superuser('foo') 206 | 207 | Moreover, slacker.Slacker adds transparent support for remote invocation 208 | of custom managers and model methods, returning just the model instance 209 | attributes, etc. 210 | 211 | 212 | Parallel execution 213 | ================== 214 | 215 | Parallel task execution is supported by adisp library:: 216 | 217 | def _task1(param1, param2): 218 | # do something blocking 219 | return results 220 | 221 | def _task2(): 222 | # do something blocking 223 | return results 224 | 225 | # worker can be reused 226 | worker = ThreadWorker() 227 | task1 = Slacker(_task1, worker) 228 | task2 = Slacker(_task2, worker) 229 | 230 | @adisp.process 231 | def process_data(): 232 | # this will execute task1 and task2 in parallel 233 | # and return the result after all data is ready 234 | res1, res2 = yield task1('foo', 'bar'), task2() 235 | print res1, res2 236 | 237 | ..
note:: 238 | 239 | this will fail with ``DjangoWorker`` and django development server 240 | because django development server is single-threaded 241 | 242 | 243 | Contributing 244 | ============ 245 | 246 | If you have any suggestions, bug reports or 247 | annoyances please report them to the issue tracker: 248 | 249 | * https://github.com/kmike/tornado-slacker/issues 250 | 251 | Source code: 252 | 253 | * https://bitbucket.org/kmike/tornado-slacker/ 254 | * https://github.com/kmike/tornado-slacker/ 255 | 256 | Both hg and git pull requests are welcome! 257 | 258 | Credits 259 | ======= 260 | 261 | Inspiration: 262 | 263 | * https://github.com/satels/django-async-dbslayer/ 264 | * https://bitbucket.org/david/django-roa/ 265 | * http://tornadogists.org/654157/ 266 | 267 | Third-party software: 268 | 269 | * `adisp `_ (tornado adisp implementation 270 | is taken from `brukva `_); 271 | * exception serialization utils are from 272 | `billiard `_ by Ask Solem. 273 | 274 | License 275 | ======= 276 | 277 | The license is MIT. 278 | 279 | Bundled adisp library uses Simplified BSD License. 280 | 281 | slacker.serialization is under BSD License. 
282 | -------------------------------------------------------------------------------- /runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import os 4 | import sys 5 | from django.core.management import execute_manager 6 | 7 | # always use slacker from the checkout, not the installed version 8 | sys.path.insert(0, os.path.dirname(__file__)) 9 | 10 | # add project to pythonpath 11 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'test_project')) 12 | 13 | sys.argv.insert(1, 'test') 14 | if len(sys.argv) == 2: 15 | sys.argv.extend(['testapp']) 16 | 17 | from test_project import settings 18 | execute_manager(settings) 19 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from distutils.core import setup 3 | 4 | for cmd in ('egg_info', 'develop', 'upload_sphinx', 'build_sphinx'): 5 | import sys 6 | if cmd in sys.argv: 7 | from setuptools import setup 8 | 9 | version='0.1' 10 | 11 | setup( 12 | name = 'tornado-slacker', 13 | version = version, 14 | author = 'Mikhail Korobov', 15 | author_email = 'kmike84@gmail.com', 16 | url = 'https://github.com/kmike/tornado-slacker/', 17 | download_url = 'https://bitbucket.org/kmike/tornado-slacker/get/tip.zip', 18 | 19 | description = 'This package provides an easy API for moving the work out of the tornado process / event loop.', 20 | long_description = open('README.rst').read(), 21 | license = 'MIT license', 22 | requires = ['tornado (>= 1.2)'], 23 | 24 | packages=[ 25 | 'slacker', 26 | 'slacker.adisp', 27 | 'slacker.workers', 28 | 'slacker.django_backend', 29 | ], 30 | 31 | classifiers=[ 32 | 'Development Status :: 3 - Alpha', 33 | 'Framework :: Django', 34 | 'Intended Audience :: Developers', 35 | 'License :: OSI Approved :: MIT License', 36 | 'Programming Language 
:: Python', 37 | 'Programming Language :: Python :: 2.6', 38 | 'Programming Language :: Python :: 2.7', 39 | 'Topic :: Software Development :: Libraries :: Python Modules', 40 | 'Topic :: Database', 41 | ], 42 | ) 43 | -------------------------------------------------------------------------------- /slacker/__init__.py: -------------------------------------------------------------------------------- 1 | from slacker.postpone import Slacker 2 | -------------------------------------------------------------------------------- /slacker/adisp/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Slacker's adisp library wrapper. 3 | 4 | Removes the need to call 'adisp.async(proceed)' manually on yield for Slacker 5 | and Postponed objects: they can now be yielded directly. 6 | """ 7 | 8 | from slacker.adisp._adisp import CallbackDispatcher, async 9 | from slacker.postpone import Postponed, Slacker 10 | 11 | 12 | class _SlackerCallbackDispatcher(CallbackDispatcher): 13 | 14 | def call(self, callers): 15 | if hasattr(callers, '__iter__'): 16 | callers = map(self._prepare, callers) 17 | else: 18 | callers = self._prepare(callers) 19 | return super(_SlackerCallbackDispatcher, self).call(callers) 20 | 21 | def _prepare(self, func): 22 | if isinstance(func, (Postponed, Slacker)): 23 | return async(func.proceed)() 24 | return func 25 | 26 | 27 | def process(func): 28 | def wrapper(*args, **kwargs): 29 | _SlackerCallbackDispatcher(func(*args, **kwargs)) 30 | return wrapper 31 | -------------------------------------------------------------------------------- /slacker/adisp/_adisp.py: -------------------------------------------------------------------------------- 1 | # -*- coding:utf-8 -*- 2 | ''' 3 | Adisp is a library that allows structuring code with asynchronous calls and 4 | callbacks without defining callbacks as separate functions. The code then 5 | becomes sequential and easy to read. 
The library is not a framework by itself 6 | and can be used in other environments that provides asynchronous working model 7 | (see an example with Tornado server in proxy_example.py). 8 | 9 | Usage: 10 | 11 | ## Organizing calling code 12 | 13 | All the magic is done with Python 2.5 decorators that allow for control flow to 14 | leave a function, do sometihing else for some time and then return into the 15 | calling function with a result. So the function that makes asynchronous calls 16 | should look like this: 17 | 18 | @process 19 | def my_handler(): 20 | response = yield some_async_func() 21 | data = parse_response(response) 22 | result = yield some_other_async_func(data) 23 | store_result(result) 24 | 25 | Each `yield` is where the function returns and lets the framework around it to 26 | do its job. And the code after `yield` is what usually goes in a callback. 27 | 28 | The @process decorator is needed around such a function. It makes it callable 29 | as an ordinary function and takes care of dispatching callback calls back into 30 | it. 31 | 32 | ## Writing asynchronous function 33 | 34 | In the example above functions "some_async_func" and "some_other_async_func" 35 | are those that actually run an asynchronous process. They should follow two 36 | conditions: 37 | 38 | - accept a "callback" parameter with a callback function that they should call 39 | after an asynchronous process is finished 40 | - a callback should be called with one parameter -- the result 41 | - be wrapped in the @async decorator 42 | 43 | The @async decorator makes a function call lazy allowing the @process that 44 | calls it to provide a callback to call. 45 | 46 | Using async with @-syntax is most convenient when you write your own 47 | asynchronous function (and can make your callback parameter to be named 48 | "callback"). But when you want to call some library function you can wrap it in 49 | async in place. 
50 | 51 | # call http.fetch(url, callback=callback) 52 | result = yield async(http.fetch)(url) 53 | 54 | # call http.fetch(url, cb=safewrap(callback)) 55 | result = yield async(http.fetch, cbname='cb', cbwrapper=safewrap)(url) 56 | 57 | Here you can use two optional parameters for async: 58 | 59 | - `cbname`: a name of a parameter in which the function expects callbacks 60 | - `cbwrapper`: a wrapper for the callback itself that will be applied before 61 | calling it 62 | 63 | ## Chain calls 64 | 65 | @async function can also be @process'es allowing to effectively chain 66 | asynchronous calls as it can be done with normal functions. In this case the 67 | @async decorator should be the outer one: 68 | 69 | @async 70 | @process 71 | def async_calling_other_asyncs(arg, callback): 72 | # .... 73 | 74 | ## Multiple asynchronous calls 75 | 76 | The library also allows to call multiple asynchronous functions in parallel and 77 | get all their result for processing at once: 78 | 79 | @async 80 | def async_http_get(url, callback): 81 | # get url asynchronously 82 | # call callback(response) at the end 83 | 84 | @process 85 | def get_stat(): 86 | urls = ['http://.../', 'http://.../', ... ] 87 | responses = yield map(async_http_get, urls) 88 | 89 | After *all* the asynchronous calls will complete `responses` will be a list of 90 | responses corresponding to given urls.
91 | ''' 92 | from functools import partial 93 | from tornado.ioloop import IOLoop 94 | 95 | class CallbackDispatcher(object): 96 | def __init__(self, generator): 97 | self.io_loop = IOLoop.instance() 98 | self.g = generator 99 | try: 100 | self.call(self.g.next()) 101 | except StopIteration: 102 | pass 103 | 104 | def _queue_send_result(self, result, single): 105 | self.io_loop.add_callback(partial(self._send_result, result, single)) 106 | 107 | def _send_result(self, results, single): 108 | try: 109 | result = results[0] if single else results 110 | if isinstance(result, Exception): 111 | self.call(self.g.throw(result)) 112 | else: 113 | self.call(self.g.send(result)) 114 | except StopIteration: 115 | pass 116 | 117 | def call(self, callers): 118 | single = not hasattr(callers, '__iter__') 119 | if single: 120 | callers = [callers] 121 | self.call_count = len(list(callers)) 122 | results = [None] * self.call_count 123 | if self.call_count == 0: 124 | self._queue_send_result(results, single) 125 | else: 126 | for count, caller in enumerate(callers): 127 | caller(callback=partial(self.callback, results, count, single)) 128 | 129 | def callback(self, results, index, single, arg): 130 | self.call_count -= 1 131 | results[index] = arg 132 | if self.call_count > 0: 133 | return 134 | self._queue_send_result(results, single) 135 | 136 | def process(func): 137 | def wrapper(*args, **kwargs): 138 | CallbackDispatcher(func(*args, **kwargs)) 139 | return wrapper 140 | 141 | def async(func, cbname='callback', cbwrapper=lambda x: x): 142 | def wrapper(*args, **kwargs): 143 | def caller(callback): 144 | kwargs[cbname] = cbwrapper(callback) 145 | return func(*args, **kwargs) 146 | return caller 147 | return wrapper 148 | -------------------------------------------------------------------------------- /slacker/django_backend/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/kmike/tornado-slacker/3f11ad29777fa24bde6bf42b66c37beca97eafc5/slacker/django_backend/__init__.py -------------------------------------------------------------------------------- /slacker/django_backend/conf.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | SLACKER_SERVER = getattr(settings, 'SLACKER_SERVER', 'http://127.0.0.1:8000') 3 | -------------------------------------------------------------------------------- /slacker/django_backend/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf.urls.defaults import * 2 | 3 | from slacker.django_backend.views import slacker_execute 4 | 5 | urlpatterns = patterns('', 6 | url(r'^execute/$', slacker_execute, name='slacker-execute'), 7 | ) 8 | -------------------------------------------------------------------------------- /slacker/django_backend/views.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from django.http import HttpResponse, Http404 3 | from django.views.decorators.csrf import csrf_exempt 4 | 5 | from slacker.postpone import proceed_pickled 6 | 7 | @csrf_exempt 8 | def slacker_execute(request): 9 | # FIXME: auth? 
10 | 11 | if request.method != 'POST': 12 | raise Http404 13 | 14 | data = proceed_pickled(request.raw_post_data) 15 | return HttpResponse(data) 16 | -------------------------------------------------------------------------------- /slacker/postpone.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import pprint 3 | import sys 4 | import types 5 | try: 6 | import cPickle as pickle 7 | except ImportError: 8 | import pickle 9 | 10 | from slacker.serialization import get_pickleable_exception 11 | 12 | 13 | class SlackerException(Exception): 14 | pass 15 | 16 | 17 | class _Module(object): 18 | """ Helper class for pickling python modules """ 19 | 20 | def __init__(self, module): 21 | self.module = module 22 | 23 | def __getstate__(self): 24 | return self.module.__name__ 25 | 26 | def __setstate__(self, name): 27 | __import__(name) 28 | self.module = sys.modules[name] 29 | 30 | 31 | class Postponed(object): 32 | """ 33 | Stores attribute, call and slice chain without actully 34 | calling methods, accessing attributes and performing slicing. 35 | 36 | Wrapped object and method arguments must be picklable. 37 | 38 | Collecting the access to private methods and attributes 39 | (beginning with __two_underscores) is not supported. 40 | 41 | FIXME: some attributes (e.g. '_obj', '_chain', '_extra', 'proceed') 42 | of original object are replaced with the ones from this proxy. 
43 | """ 44 | 45 | def __init__(self, obj, worker = None): 46 | self._obj = obj 47 | self._chain = [] 48 | if not worker: 49 | from .workers.local import DummyWorker 50 | worker = DummyWorker() 51 | self._worker = worker 52 | 53 | 54 | def __repr__(self): 55 | return "%s: %s" % (self._obj, pprint.pformat(self._chain)) 56 | 57 | def __getstate__(self): 58 | 59 | if isinstance(self._obj, types.ModuleType): 60 | return self._chain, _Module(self._obj) 61 | 62 | return self._chain, self._obj 63 | 64 | def __setstate__(self, state): 65 | self._chain, self._obj = state 66 | 67 | if isinstance(self._obj, _Module): 68 | self._obj = self._obj.module 69 | 70 | # always use local worker after unpickling 71 | from .workers.local import DummyWorker 72 | self._worker = DummyWorker() 73 | 74 | @property 75 | def _pickled(self): 76 | return pickle.dumps(self, pickle.HIGHEST_PROTOCOL) 77 | 78 | def __getattr__(self, attr): 79 | # pickle.dumps internally checks if __getnewargs__ is defined 80 | # and thus returning ChainProxy object instead of 81 | # raising AttributeError breaks pickling. Returning self instead 82 | # of raising an exception for private attributes can possibly 83 | # break something else so the access to other private methods 84 | # and attributes is also not overriden. 
85 | if attr.startswith('__'): 86 | return self.__getattribute__(attr) 87 | 88 | # attribute access is stored as 1-element tuple 89 | self._chain.append((attr,)) 90 | return self 91 | 92 | def __getitem__(self, slice): 93 | # slicing operation is stored as 2-element tuple 94 | self._chain.append((slice, None,)) 95 | return self 96 | 97 | def __call__(self, *args, **kwargs): 98 | # method call is stored as 3-element tuple 99 | if not self._chain: 100 | # top-level call 101 | self._chain.append((None, args, kwargs)) 102 | else: 103 | method_name = self._chain[-1][0] 104 | self._chain[-1] = (method_name, args, kwargs) 105 | return self 106 | 107 | def _proceed(self): 108 | """ Executes the collected chain and returns the result. """ 109 | result = self._obj 110 | for op in self._chain: 111 | if len(op) == 1: # attribute 112 | result = getattr(result, op[0]) 113 | elif len(op) == 2: # slice or index 114 | result = result[op[0]] 115 | elif len(op) == 3: # callable 116 | func = result if op[0] is None else getattr(result, op[0]) 117 | result = func(*op[1], **op[2]) 118 | return result 119 | 120 | def proceed(self, callback=None, worker=None): 121 | """ 122 | Executes the collected chain using given worker and calls the 123 | callback with results. 124 | """ 125 | worker = worker or self._worker 126 | worker.proceed(self, callback) 127 | 128 | 129 | class Slacker(object): 130 | """ 131 | Starts a new Postponed instance for every attribute access. 132 | Useful for wrapping existing classes into postponing proxies. 133 | """ 134 | def __init__(self, obj, worker=None): 135 | self._obj = obj 136 | self._worker = worker 137 | 138 | def __getattr__(self, item): 139 | return getattr(Postponed(self._obj, self._worker), item) 140 | 141 | def __call__(self, *args, **kwargs): 142 | return Postponed(self._obj, self._worker).__call__(*args, **kwargs) 143 | 144 | 145 | def safe_proceed(postponed): 146 | """ 147 | Proceeds postponed object locally and returns the result. 
148 | If and exception is thrown during execution, this exception 149 | is catched and returned as a result. It is also ensured that 150 | returned exception can be pickled. 151 | """ 152 | try: 153 | return postponed._proceed() 154 | except Exception, e: 155 | return get_pickleable_exception(e) 156 | 157 | 158 | def proceed_pickled(pickled_postponed_obj): 159 | """ 160 | Unpickles postponed object, proceeds it locally, then pickles the result 161 | and returns it (or the pickled exception if the processing fails). 162 | On unpickling errors SlackerException is returned. 163 | 164 | Useful for worker implementation. 165 | """ 166 | 167 | def get_result(): 168 | try: 169 | postponed = pickle.loads(pickled_postponed_obj) 170 | except pickle.PicklingError, e: 171 | return SlackerException(str(e)) 172 | 173 | if not isinstance(postponed, Postponed): 174 | return SlackerException('Pickled object is not an instance of Postponed') 175 | 176 | return safe_proceed(postponed) 177 | 178 | return pickle.dumps(get_result(), pickle.HIGHEST_PROTOCOL) 179 | -------------------------------------------------------------------------------- /slacker/serialization.py: -------------------------------------------------------------------------------- 1 | # Exception serialization utils are borrowed from 2 | # https://github.com/ask/billiard 3 | 4 | import sys 5 | import types 6 | import operator 7 | try: 8 | import cPickle as pickle 9 | except ImportError: 10 | import pickle 11 | from copy import deepcopy 12 | 13 | try: 14 | _error_bases = (BaseException, ) 15 | except NameError: 16 | _error_bases = (SystemExit, KeyboardInterrupt) 17 | 18 | 19 | def is_unwanted_exception_cls(exc_cls): 20 | unwanted_classes = (Exception, ) + _error_bases + (object, ) 21 | for unwanted_cls in unwanted_classes: 22 | if exc_cls is unwanted_cls: 23 | return True 24 | return False 25 | 26 | if sys.version_info < (2, 5): 27 | 28 | # Prior to Python 2.5, Exception was an old-style class 29 | def 
subclass_exception(name, parent, unused): 30 | return types.ClassType(name, (parent,), {}) 31 | else: 32 | def subclass_exception(name, parent, module): 33 | return type(name, (parent,), {'__module__': module}) 34 | 35 | 36 | def find_nearest_pickleable_exception(exc): 37 | """With an exception instance, iterate over its super classes (by mro) 38 | and find the first super exception that is pickleable. It does 39 | not go below :exc:`Exception` (i.e. it skips :exc:`Exception`, 40 | :class:`BaseException` and :class:`object`). If that happens 41 | you should use :exc:`UnpickleableException` instead. 42 | 43 | :param exc: An exception instance. 44 | 45 | :returns: the nearest exception if it's not :exc:`Exception` or below, 46 | if it is it returns ``None``. 47 | 48 | :rtype: :exc:`Exception` 49 | 50 | """ 51 | 52 | mro_ = getattr(exc.__class__, "mro", lambda: []) 53 | for supercls in mro_(): 54 | if is_unwanted_exception_cls(supercls): 55 | # only BaseException and object, from here on down, 56 | # we don't care about these. 57 | return None 58 | try: 59 | exc_args = getattr(exc, "args", []) 60 | superexc = supercls(*exc_args) 61 | pickle.dumps(superexc) 62 | except: 63 | pass 64 | else: 65 | return superexc 66 | return None 67 | 68 | 69 | def create_exception_cls(name, module, parent=None): 70 | """Dynamically create an exception class.""" 71 | if not parent: 72 | parent = Exception 73 | return subclass_exception(name, parent, module) 74 | 75 | 76 | class UnpickleableExceptionWrapper(Exception): 77 | """Wraps unpickleable exceptions. 78 | 79 | :param exc_module: see :attr:`exc_module`. 80 | 81 | :param exc_cls_name: see :attr:`exc_cls_name`. 82 | 83 | :param exc_args: see :attr:`exc_args` 84 | 85 | .. attribute:: exc_module 86 | 87 | The module of the original exception. 88 | 89 | .. attribute:: exc_cls_name 90 | 91 | The name of the original exception class. 92 | 93 | .. attribute:: exc_args 94 | 95 | The arguments for the original exception. 
96 | 97 | Example 98 | 99 | >>> try: 100 | ... something_raising_unpickleable_exc() 101 | >>> except Exception, e: 102 | ... exc = UnpickleableException(e.__class__.__module__, 103 | ... e.__class__.__name__, 104 | ... e.args) 105 | ... pickle.dumps(exc) # Works fine. 106 | 107 | """ 108 | 109 | def __init__(self, exc_module, exc_cls_name, exc_args): 110 | self.exc_module = exc_module 111 | self.exc_cls_name = exc_cls_name 112 | self.exc_args = exc_args 113 | Exception.__init__(self, exc_module, exc_cls_name, exc_args) 114 | 115 | 116 | def get_pickleable_exception(exc): 117 | """Make sure exception is pickleable.""" 118 | nearest = find_nearest_pickleable_exception(exc) 119 | if nearest: 120 | return nearest 121 | 122 | try: 123 | pickle.dumps(deepcopy(exc)) 124 | except Exception: 125 | excwrapper = UnpickleableExceptionWrapper( 126 | exc.__class__.__module__, 127 | exc.__class__.__name__, 128 | getattr(exc, "args", [])) 129 | return excwrapper 130 | return exc 131 | 132 | 133 | def get_pickled_exception(exc): 134 | """Get original exception from exception pickled using 135 | :meth:`get_pickleable_exception`.""" 136 | if isinstance(exc, UnpickleableExceptionWrapper): 137 | exc_cls = create_exception_cls(exc.exc_cls_name, 138 | exc.exc_module) 139 | return exc_cls(*exc.exc_args) 140 | return exc 141 | -------------------------------------------------------------------------------- /slacker/tests.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from django.utils import unittest 3 | 4 | from slacker.postpone import Postponed, Slacker 5 | 6 | class Foo(object): 7 | 8 | def __init__(self, name): 9 | self._name = name 10 | self.name_accessed = False 11 | 12 | def create_bar(self): 13 | self.bar = Foo(self.name+' bar') 14 | return self 15 | 16 | def get_name(self): 17 | return self._name 18 | 19 | def xy(self, x, y): 20 | return "%s-%s" % (x,y) 21 | 22 | @property 23 | def name(self): 24 | self.name_accessed = 
True 25 | return self._name 26 | 27 | FooSlacker = Slacker(Foo) 28 | 29 | class PostponeTest(unittest.TestCase): 30 | 31 | def assertRestored(self, chain, value): 32 | self.assertEqual(chain._proceed(), value) 33 | 34 | def setUp(self): 35 | self.foo = FooSlacker('foo') 36 | 37 | def test_method_basic(self): 38 | self.assertRestored(self.foo.get_name(), 'foo') 39 | 40 | def test_method_args(self): 41 | chain = self.foo.xy(1, 2) 42 | self.assertRestored(chain, '1-2') 43 | 44 | def test_method_kwargs(self): 45 | self.assertRestored(self.foo.xy(y=2, x=1), '1-2') 46 | 47 | def test_attributes(self): 48 | self.assertRestored(self.foo.name, 'foo') 49 | 50 | def test_slicing(self): 51 | self.assertRestored(self.foo.name[0], 'f') 52 | 53 | def test_chaining(self): 54 | chain = self.foo.create_bar().bar.create_bar().bar.name[1:-1] 55 | self.assertRestored(chain, 'oo bar ba') 56 | 57 | def test_no_execution(self): 58 | real_foo = Foo('foo') 59 | foo = Postponed(real_foo) 60 | 61 | foo.create_bar() 62 | self.assertFalse(hasattr(real_foo, 'bar')) 63 | 64 | foo.name 65 | self.assertFalse(real_foo.name_accessed) 66 | 67 | real_foo.name 68 | self.assertTrue(real_foo.name_accessed) 69 | 70 | def test_top_level_callables(self): 71 | chain = Postponed(Foo)('bar') 72 | self.assertEqual(chain._proceed().name, 'bar') 73 | 74 | 75 | class ModulesTest(unittest.TestCase): 76 | 77 | def assertRepickled(self, chain, value): 78 | repickled = pickle.loads(chain._pickled) 79 | self.assertEqual(repickled._proceed(), value) 80 | 81 | def test_modules(self): 82 | import string 83 | chain = Postponed(string).ascii_letters 84 | self.assertEqual(chain._proceed(), string.ascii_letters) 85 | 86 | def test_module_pickling_simple(self): 87 | import string 88 | chain = Postponed(string).ascii_letters 89 | self.assertRepickled(chain, string.ascii_letters) 90 | 91 | def test_module_pickling_dotted(self): 92 | import logging.handlers 93 | chain = Postponed(logging.handlers).SocketHandler 94 | 
self.assertRepickled(chain, logging.handlers.SocketHandler) 95 | 96 | def test_module_pickling_renamed(self): 97 | from logging import handlers as hh 98 | chain = Postponed(hh).SocketHandler 99 | self.assertRepickled(chain, hh.SocketHandler) 100 | 101 | def test_slacker(self): 102 | from logging import handlers as hh 103 | handlers = Slacker(hh) 104 | self.assertRepickled(handlers.SocketHandler, hh.SocketHandler) 105 | self.assertRepickled(handlers.DatagramHandler, hh.DatagramHandler) 106 | -------------------------------------------------------------------------------- /slacker/workers/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from .local import DummyWorker, ThreadWorker 3 | from .http import HttpWorker, CurlHttpWorker 4 | 5 | try: 6 | from .django import DjangoWorker 7 | except ImportError, e: # django is not installed 8 | import warnings 9 | warnings.warn("DjangoWorker is not available: %s" % e) 10 | -------------------------------------------------------------------------------- /slacker/workers/django.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from django.core.urlresolvers import reverse 4 | from slacker.django_backend.conf import SLACKER_SERVER 5 | from .http import HttpWorker 6 | 7 | class DjangoWorker(HttpWorker): 8 | 9 | """ HttpWorker with django's defaults """ 10 | 11 | def __init__(self, server=None, path=None): 12 | server = server or SLACKER_SERVER 13 | path = path or reverse('slacker-execute') 14 | super(DjangoWorker, self).__init__(server, path) 15 | 16 | -------------------------------------------------------------------------------- /slacker/workers/http.py: -------------------------------------------------------------------------------- 1 | try: 2 | import cPickle as pickle 3 | except ImportError: 4 | import pickle 5 | 6 | from tornado.httpclient import 
AsyncHTTPClient 7 | from tornado.curl_httpclient import CurlAsyncHTTPClient 8 | 9 | 10 | class HttpWorker(object): 11 | """ 12 | This worker sends pickled postponed object to a web server via 13 | HTTP POST request and waits for a response. Response is unpickled 14 | and passed to the callback. 15 | 16 | Combined with traditional threaded web server like apache2 + mod_wsgi 17 | or cherrypy this enables easy deployment, code isolation and a thread 18 | pool for free (managed by webserver). HTTP and pickling/unpickling 19 | the result, however, may cause a significant overhead. 20 | 21 | Django backend implementation can be found at ``slacker.django_backend``. 22 | """ 23 | 24 | HTTPClient = AsyncHTTPClient 25 | 26 | def __init__(self, server='http://127.0.0.1:8000', path='/'): 27 | self.url = server + path 28 | 29 | def proceed(self, postponed, callback=None): 30 | 31 | def on_response(response): 32 | if callback: 33 | result = pickle.loads(response.body) 34 | callback(result) 35 | 36 | http = self.HTTPClient() 37 | http.fetch(self.url, on_response, method='POST', body=postponed._pickled) 38 | 39 | 40 | class CurlHttpWorker(HttpWorker): 41 | """ HttpWorker that uses CurlAsyncHTTPClient instead of AsyncHTTPClient """ 42 | HTTPClient = CurlAsyncHTTPClient 43 | 44 | -------------------------------------------------------------------------------- /slacker/workers/local.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from tornado import stack_context 3 | from tornado.ioloop import IOLoop 4 | from slacker.postpone import safe_proceed 5 | 6 | class DummyWorker(object): 7 | """ Dummy worker for local immediate execution """ 8 | def proceed(self, postponed, callback = None): 9 | # safe_proceed is called instead of _proceed 10 | # for consistent error handling 11 | res = safe_proceed(postponed) 12 | if callback: 13 | callback(res) 14 | 15 | 16 | class ThreadWorker(object): 17 | """ 18 | Executes code 
in a thread from a ThreadPool. 19 | 20 | .. warning:: 21 | 22 | postponed code shouldn't interact with tornado because 23 | tornado is not thread-safe. 24 | 25 | .. waring:: 26 | 27 | I'm bad at threads so this can be broken ;) 28 | """ 29 | 30 | _default_pool = None 31 | 32 | def __init__(self, pool=None, ioloop=None): 33 | """ 34 | Initializes ThreadWorker. 35 | 'pool' is a multiprocessing.pool.ThreadPool instance, 36 | 'ioloop' is a tornado.ioloop.IOLoop instance. 37 | """ 38 | self.ioloop = ioloop or IOLoop.instance() 39 | 40 | # create default pool only if necessary 41 | if not pool and not self.__class__._default_pool: 42 | from multiprocessing.pool import ThreadPool 43 | self.__class__._default_pool = ThreadPool(5) 44 | 45 | self.pool = pool or self.__class__._default_pool 46 | 47 | 48 | def proceed(self, postponed, callback=None): 49 | _proceed = partial(safe_proceed, postponed) 50 | 51 | if callback is None: 52 | self.pool.apply_async(_proceed) 53 | return 54 | 55 | # Without stack_context.wrap exceptions will not be propagated, 56 | # they'll be catched by tornado. Hours of debugging ;) 57 | @stack_context.wrap 58 | def on_response(result): 59 | self.ioloop.add_callback(partial(callback, result)) 60 | 61 | self.pool.apply_async(_proceed, callback = on_response) 62 | -------------------------------------------------------------------------------- /test_project/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test_project/settings.py: -------------------------------------------------------------------------------- 1 | # Django settings for test project. 
2 | import os, sys 3 | PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) 4 | join = lambda p: os.path.abspath(os.path.join(PROJECT_ROOT, p)) 5 | 6 | sys.path.insert(0, join('..')) 7 | 8 | DEBUG = True 9 | TEMPLATE_DEBUG = DEBUG 10 | 11 | DATABASES = { 12 | 'default': { 13 | 'ENGINE': 'django.db.backends.sqlite3', 14 | 'NAME': join('db.sqlite'), 15 | 16 | # :memory: databases cause obscure bugs in multithreaded environment 17 | # and django uses :memory: as TEST_NAME by default so it is necessary 18 | # to make test database real file. 19 | 'TEST_NAME': join('db-test.sqlite'), 20 | } 21 | } 22 | 23 | SECRET_KEY = '5mcs97ar-(nnxhfkx0%^+0^sr!e(ax=x$2-!8dqy25ff-l1*a=' 24 | TEMPLATE_LOADERS = ( 25 | 'django.template.loaders.filesystem.Loader', 26 | 'django.template.loaders.app_directories.Loader', 27 | # 'django.template.loaders.eggs.Loader', 28 | ) 29 | 30 | TEMPLATE_DIRS = ( 31 | join('templates'), 32 | ) 33 | 34 | MIDDLEWARE_CLASSES = ( 35 | 'django.middleware.common.CommonMiddleware', 36 | 'django.contrib.sessions.middleware.SessionMiddleware', 37 | 'django.middleware.csrf.CsrfViewMiddleware', 38 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 39 | 'django.contrib.messages.middleware.MessageMiddleware', 40 | ) 41 | 42 | ROOT_URLCONF = 'urls' 43 | 44 | INSTALLED_APPS=( 45 | 'django.contrib.auth', 46 | 'django.contrib.contenttypes', 47 | 'testapp', 48 | ) 49 | -------------------------------------------------------------------------------- /test_project/templates/404.html: -------------------------------------------------------------------------------- 1 | page not found 2 | -------------------------------------------------------------------------------- /test_project/templates/500.html: -------------------------------------------------------------------------------- 1 | error 500 2 | -------------------------------------------------------------------------------- /test_project/testapp/__init__.py: 
-------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test_project/testapp/models.py: -------------------------------------------------------------------------------- 1 | # hello, django testrunner! 2 | -------------------------------------------------------------------------------- /test_project/testapp/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from slacker.tests import * 4 | from .django_tests import * 5 | from .workers import * 6 | 7 | -------------------------------------------------------------------------------- /test_project/testapp/tests/django_tests.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import pickle 3 | from django.test import TransactionTestCase as DjangoTestCase 4 | from django.contrib.auth.models import User 5 | from slacker.postpone import Postponed 6 | 7 | class DjangoQueryPostponeTest(DjangoTestCase): 8 | 9 | def setUp(self): 10 | self.user = User.objects.create_user('example', 'example@example.com') 11 | 12 | @property 13 | def AsyncUser(self): 14 | return Postponed(User) 15 | 16 | def test_restore(self): 17 | user_query = self.AsyncUser.objects.get(username='example') 18 | self.assertEqual(user_query._proceed(), self.user) 19 | 20 | def test_pickling_unpickling(self): 21 | user_query = self.AsyncUser.objects.get(username='example') 22 | self.assertEqual(pickle.loads(user_query._pickled)._proceed(), self.user) 23 | 24 | -------------------------------------------------------------------------------- /test_project/testapp/tests/workers.py: -------------------------------------------------------------------------------- 1 | from tornado.testing import AsyncHTTPTestCase 2 | from tornado.wsgi import WSGIContainer 3 | from tornado.ioloop 
import IOLoop 4 | 5 | from django.core.handlers.wsgi import WSGIHandler 6 | from django.test import TransactionTestCase as DjangoTestCase 7 | from django.contrib.auth.models import User 8 | from django.core.exceptions import ObjectDoesNotExist 9 | 10 | from slacker import Slacker, adisp 11 | from slacker.workers import ThreadWorker, DjangoWorker 12 | 13 | class BaseWorkerTest(AsyncHTTPTestCase, DjangoTestCase): 14 | 15 | SlackerClass = Slacker(User) 16 | 17 | def setUp(self): 18 | self.user = User.objects.create_user('example', 'example@example.com') 19 | self.res = None 20 | super(BaseWorkerTest, self).setUp() 21 | 22 | def get_new_ioloop(self): 23 | return IOLoop.instance() 24 | 25 | def get_app(self): 26 | return WSGIContainer(WSGIHandler()) 27 | 28 | @adisp.process 29 | def get_user(self, username=None): 30 | username = username or self.user.username 31 | self.res = yield self.SlackerClass.objects.get(username=username) 32 | self.stop() 33 | 34 | def test_get_user(self): 35 | self.get_user() 36 | self.wait() 37 | self.assertEqual(self.res, self.user) 38 | 39 | def test_error_handling(self): 40 | 41 | def run(): 42 | self.get_user('vasia') 43 | self.wait() 44 | 45 | self.assertRaises(ObjectDoesNotExist, run) 46 | 47 | 48 | 49 | class DjangoWorkerTest(BaseWorkerTest): 50 | def setUp(self): 51 | super(DjangoWorkerTest, self).setUp() 52 | self.SlackerClass = Slacker(User, DjangoWorker(self.get_url(''))) 53 | 54 | 55 | class ThreadedWorkerTest(BaseWorkerTest): 56 | SlackerClass = Slacker(User, ThreadWorker()) 57 | 58 | 59 | -------------------------------------------------------------------------------- /test_project/urls.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from django.conf.urls.defaults import * 3 | 4 | urlpatterns = patterns('', 5 | (r'^async/', include('slacker.django_backend.urls')), 6 | ) 7 | --------------------------------------------------------------------------------