├── .github └── workflows │ ├── build.yml │ ├── codecov.yml │ ├── codeql-analysis.yml │ └── pythonpublish.yml ├── .gitignore ├── LICENSE ├── README.md ├── SECURITY.md ├── celery_rpc ├── __init__.py ├── app.py ├── base.py ├── client.py ├── codecs.py ├── config.py ├── exceptions.py ├── models.py ├── runtests │ ├── __init__.py │ ├── runtests.py │ └── settings.py ├── tasks.py ├── tests │ ├── __init__.py │ ├── factories.py │ ├── models.py │ ├── tasks.py │ ├── test_client.py │ ├── test_codecs.py │ ├── test_errors.py │ ├── test_m2m.py │ ├── test_pipeline.py │ ├── test_tasks.py │ ├── tests.py │ └── utils.py └── utils.py ├── requirements.txt ├── runtests.py ├── setup.py └── tox.ini /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | max-parallel: 5 10 | matrix: 11 | python-version: [2.7, 3.7, 3.8, 3.9, "3.10"] 12 | 13 | steps: 14 | - uses: actions/checkout@v3 15 | - name: Set up Python ${{ matrix.python-version }} 16 | uses: actions/setup-python@v4 17 | with: 18 | python-version: ${{ matrix.python-version }} 19 | architecture: x64 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | pip install tox tox-gh-actions wheel 24 | - name: Test with tox 25 | run: | 26 | tox 27 | python setup.py sdist bdist_wheel install 28 | -------------------------------------------------------------------------------- /.github/workflows/codecov.yml: -------------------------------------------------------------------------------- 1 | name: codecov 2 | on: [push, pull_request] 3 | jobs: 4 | run: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@master 8 | - name: Setup Python 9 | uses: actions/setup-python@v4 10 | with: 11 | python-version: 3.x 12 | - name: Generate coverage report 13 | run: | 14 | pip install -r requirements.txt 15 | pip install coverage 16 | pip install -q -e . 17 | coverage run runtests.py 18 | coverage xml 19 | - name: Upload coverage to Codecov 20 | uses: codecov/codecov-action@v2 21 | with: 22 | token: ${{ secrets.CODECOV_TOKEN }} 23 | file: ./coverage.xml 24 | flags: unittests 25 | name: codecov-umbrella 26 | fail_ci_if_error: true 27 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: "Code scanning - action" 2 | 3 | on: 4 | push: 5 | pull_request: 6 | schedule: 7 | - cron: '0 21 * * 2' 8 | 9 | jobs: 10 | CodeQL-Build: 11 | 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Checkout repository 16 | uses: actions/checkout@v3 17 | with: 18 | # We must fetch at least the immediate parents so that if this is 19 | # a pull request then we can checkout the head. 20 | fetch-depth: 2 21 | 22 | # If this run was triggered by a pull request event, then checkout 23 | # the head of the pull request instead of the merge commit. 24 | - run: git checkout HEAD^2 25 | if: ${{ github.event_name == 'pull_request' }} 26 | 27 | # Initializes the CodeQL tools for scanning. 28 | - name: Initialize CodeQL 29 | uses: github/codeql-action/init@v1 30 | # Override language selection by uncommenting this and choosing your languages 31 | # with: 32 | # languages: go, javascript, csharp, python, cpp, java 33 | 34 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
35 | # If this step fails, then you should remove it and run the build manually (see below) 36 | - name: Autobuild 37 | uses: github/codeql-action/autobuild@v1 38 | 39 | # ℹ️ Command-line programs to run using the OS shell. 40 | # 📚 https://git.io/JvXDl 41 | 42 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 43 | # and modify them (or add more) to build your code if your project 44 | # uses a compiled language 45 | 46 | #- run: | 47 | # make bootstrap 48 | # make release 49 | 50 | - name: Perform CodeQL Analysis 51 | uses: github/codeql-action/analyze@v1 52 | -------------------------------------------------------------------------------- /.github/workflows/pythonpublish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | 3 | name: Upload Python Package 4 | 5 | on: 6 | release: 7 | types: [published] 8 | 9 | jobs: 10 | deploy: 11 | 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v3 16 | - name: Set up Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: '3.x' 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | pip install setuptools wheel twine 24 | - name: Build and publish 25 | env: 26 | TWINE_USERNAME: __token__ 27 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} 28 | run: | 29 | python setup.py sdist bdist_wheel 30 | twine upload dist/* 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.log 2 | *.pot 3 | *.pyc 4 | .tox 5 | local_settings.py 6 | /.eggs/ 7 | /.idea/ 8 | /build/lib/celery_rpc/ 9 | /dist/ 10 | /django_celery_rpc.egg-info/ 11 | /djangoceleryrpc.egg-info/ 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or 4 | distribute this software, either in source code form or as a compiled 5 | binary, for any purpose, commercial or non-commercial, and by any 6 | means. 7 | 8 | In jurisdictions that recognize copyright laws, the author or authors 9 | of this software dedicate any and all copyright interest in the 10 | software to the public domain. We make this dedication for the benefit 11 | of the public at large and to the detriment of our heirs and 12 | successors. We intend this dedication to be an overt act of 13 | relinquishment in perpetuity of all present and future rights to this 14 | software under copyright law. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 
23 | 
24 | For more information, please refer to <https://unlicense.org>
25 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | django-celery-rpc
2 | =================
3 | 
4 | [![Build Status](https://github.com/just-work/django-celery-rpc/workflows/build/badge.svg?branch=master&event=push)](https://github.com/just-work/django-celery-rpc/actions?query=event%3Apush+branch%3Amaster+workflow%3Abuild)
5 | [![codecov](https://codecov.io/gh/just-work/django-celery-rpc/branch/master/graph/badge.svg)](https://codecov.io/gh/just-work/django-celery-rpc)
6 | [![PyPI version](https://badge.fury.io/py/django-celery-rpc.svg)](https://badge.fury.io/py/djangoceleryrpc)
7 | 
8 | Remote access from one system to the models and functions of another one, using Celery machinery.
9 | 
10 | Relies on three outstanding Python projects:
11 | 
12 | - [Celery](http://www.celeryproject.org/)
13 | - [Django Rest Framework](https://www.django-rest-framework.org/)
14 | - [Django](https://www.djangoproject.com/)
15 | 
16 | ## Main features
17 | 
18 | Client and server are designed to:
19 | 
20 | - filter models with Django ORM lookups, Q-objects and excludes;
21 | - change model state (create, update, update or create, delete);
22 | - change model state in bulk mode (more than one object per request);
23 | - atomically get and set model state, with bulk mode support;
24 | - call functions;
25 | - the client does not require Django.
26 | 
27 | ## Installation
28 | Install client:
29 | ```shell script
30 | pip install djangoceleryrpc
31 | ```
32 | Install server:
33 | ```shell script
34 | pip install djangoceleryrpc[server]
35 | ```
36 | 
37 | ## Basic Configuration
38 | 
39 | The default configuration of **django-celery-rpc** must be overridden in settings.py by **CELERY_RPC_CONFIG**.
40 | The **CELERY_RPC_CONFIG** is a dict which must contain at least two keys: **BROKER_URL** and **CELERY_RESULT_BACKEND**.
41 | Any Celery config params are also permitted
42 | (see [Configuration and defaults](http://celery.readthedocs.org/en/latest/configuration.html)).
43 | 
44 | ### server **span**
45 | 
46 | settings.py:
47 | 
48 | ```python
49 | # minimal required configuration
50 | CELERY_RPC_CONFIG = {
51 |     'broker_url': 'amqp://guest:guest@rabbitmq:5672//',
52 |     'result_backend': 'redis://redis:6379/0',
53 | }
54 | ```
55 | 
56 | ### server **eggs**
57 | 
58 | settings.py:
59 | 
60 | ```python
61 | # alternate request queue and routing key
62 | CELERY_RPC_CONFIG = {
63 |     'broker_url': 'amqp://guest:guest@rabbitmq:5672/',
64 |     'result_backend': 'amqp://guest:guest@rabbitmq:5672/',
65 |     'task_default_queue': 'celery_rpc.requests.alter_queue',
66 |     'task_default_routing_key': 'celery_rpc.alter_routing_key'
67 | }
68 | ```
69 | 
70 | ### client
71 | 
72 | settings.py:
73 | 
74 | ```python
75 | # these settings will be used by clients by default
76 | CELERY_RPC_CONFIG = {
77 |     'broker_url': 'amqp://guest:guest@rabbitmq:5672/',
78 |     'result_backend': 'redis://redis:6379/0',
79 | }
80 | 
81 | # the 'eggs' alternative configuration will be passed explicitly to the client constructor
82 | CELERY_RPC_EGGS_CLIENT = {
83 |     # BROKER_URL will be used by default from the section above
84 |     'result_backend': 'amqp://guest:guest@rabbitmq:5672/',
85 |     'task_default_queue': 'celery_rpc.requests.alter_queue',
86 |     'task_default_routing_key': 'celery_rpc.alter_routing_key'
87 | }
88 | ```
89 | 
90 | *Note:
91 | 1. client and server must share the same __BROKER_URL__, __RESULT_BACKEND__, __DEFAULT_EXCHANGE__, __DEFAULT_QUEUE__, __DEFAULT_ROUTING_KEY__
92 | 2. different servers must serve different request queues with different routing keys, or must work with different exchanges*
93 | 
94 | example.py
95 | 
96 | ```python
97 | from celery_rpc.client import Client
98 | from django.conf import settings
99 | 
100 | # create a client with default settings
101 | span_client = Client()
102 | 
103 | # create a client for the `eggs` server
104 | eggs_client = Client(settings.CELERY_RPC_EGGS_CLIENT)
105 | ```
106 | 
107 | ## Using client
108 | 
109 | You can find more examples in tests.
110 | 
111 | ### Filtering
112 | 
113 | Simple filtering example
114 | 
115 | ```
116 | span_client.filter('app.models:MyModel', kwargs=dict(filters={'a__exact': 'a'}))
117 | ```
118 | 
119 | Filtering with a Q object
120 | 
121 | ```
122 | from django.db.models import Q
123 | span_client.filter('app.models:MyModel', kwargs=dict(filters_Q=(Q(a='1') | Q(b='1'))))
124 | ```
125 | 
126 | Also, we can use both Q and lookups
127 | 
128 | ```
129 | span_client.filter('app.models:MyModel', kwargs=dict(filters={'c__exact': 'c'}, filters_Q=(Q(a='1') | Q(b='1'))))
130 | ```
131 | 
132 | Exclude is supported
133 | 
134 | ```
135 | span_client.filter('app.models:MyModel', kwargs=dict(exclude={'c__exact': 'c'}, exclude_Q=(Q(a='1') | Q(b='1'))))
136 | ```
137 | 
138 | You can mix filters and exclude, and Q-objects with lookups. Try it yourself. ;)
139 | 
140 | Full list of available kwargs:
141 | 
142 |     filters - dict of terms compatible with django lookup fields
143 |     offset - offset from which to return results
144 |     limit - max number of results
145 |     fields - list of serializer fields, which will be returned
146 |     exclude - lookups for excluding matched models
147 |     order_by - order of results (list, tuple or string),
148 |                minus ('-') sets reverse order, default = []
149 |     filters_Q - django Q-object for filtering models
150 |     exclude_Q - django Q-object for excluding matched models
151 | 
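Several of these kwargs can be combined in one call. A minimal sketch, assuming the same hypothetical `MyModel` fields (`id`, `a`, `b`) used in the examples above:

```python
# second page of 10 results, newest first, returning only two fields
span_client.filter('app.models:MyModel',
                   kwargs=dict(filters={'a__exact': 'a'},
                               exclude={'b__exact': 'b'},
                               order_by=['-id'],
                               limit=10,
                               offset=10,
                               fields=['id', 'a']))
```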
152 | 
153 | List all MyModel objects with high priority
154 | 
155 | ```
156 | span_client.filter('app.models:MyModel', high_priority=True)
157 | ```
158 | 
159 | ### Creating
160 | 
161 | Create one object
162 | 
163 | ```python
164 | span_client.create('apps.models:MyModel', data={"a": "a"})
165 | ```
166 | 
167 | Bulk creating
168 | 
169 | ```python
170 | span_client.create('apps.models:MyModel', data=[{"a": "a"}, {"a": "b"}])
171 | ```
172 | 
173 | ### Updating
174 | 
175 | Update one object by PK field name
176 | 
177 | ```python
178 | span_client.update('apps.models:MyModel', data={"id": 1, "a": "a"})
179 | ```
180 | 
181 | Update one object by the special alias 'pk', which is matched automatically to the PK field
182 | 
183 | ```python
184 | span_client.update('apps.models:MyModel', data={"pk": 1, "a": "a"})
185 | ```
186 | 
187 | Attention! Magic area! Update one object by any field you wish
188 | 
189 | ```python
190 | span_client.update('apps.models:MyModel', data={"alternative_key_field": 42, "a": "a"},
191 |                    kwargs={'identity': 'alternative_key_field'})
192 | ```
193 | 
194 | ### Update or create, Delete and so on
195 | 
196 | All cases are very similar. Try them in your console!
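The remaining model methods follow the same calling convention. A minimal, hedged sketch using the same hypothetical model and PK values as above:

```python
# update the object if it exists, otherwise create it
span_client.update_or_create('apps.models:MyModel', data={"id": 1, "a": "a"})

# atomically write a new state and get the previous state back
old_state = span_client.getset('apps.models:MyModel', data={"id": 1, "a": "b"})

# delete the object identified by its PK
span_client.delete('apps.models:MyModel', data={"id": 1})
```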
197 | 
198 | ### Full list of supported model methods
199 | 
200 | - `filter` - select models
201 | - `create` - create new models, raise an exception if the model already exists
202 | - `update` - update existing models
203 | - `update_or_create` - update a model if it exists, otherwise create it
204 | - `delete` - delete existing models
205 | - `getset` - set a new state and return the old state atomically
206 | 
207 | All methods support the following options:
208 | 
209 | - `fields` - shrink result fields
210 | - `serializer_cls` - fully qualified symbol name of a DRF serializer class on the server
211 | - `identity` - field name which will be used rather than the PK field (meaningless for `filter`)
212 | 
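For example, a sketch of passing these options (the serializer path here is hypothetical):

```python
# return only two fields, serialized by a custom DRF serializer defined on the server
span_client.filter('app.models:MyModel',
                   kwargs=dict(filters={'a__exact': 'a'},
                               fields=['id', 'a'],
                               serializer_cls='app.serializers:MyModelSerializer'))
```

Passing `identity` is shown in the Updating section above.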
213 | ### Pipe
214 | 
215 | It's possible to pipeline tasks, so they will be executed in one transaction.
216 | 
217 | ```python
218 | p = span_client.pipe()
219 | p = p.create('apps.models:MyModel', data={"a": "a"})
220 | p = p.create('apps.models:MyAnotherModel', data={"b": "b"})
221 | p.run()
222 | ```
223 | 
224 | You can pass some arguments from the previous task to the next one.
225 | 
226 | Suppose you have these models on the server
227 | 
228 | ```python
229 | class MyModel(models.Model):
230 |     a = models.CharField()
231 | 
232 | class MyAnotherModel(models.Model):
233 |     fk = models.ForeignKey(MyModel)
234 |     b = models.CharField()
235 | ```
236 | 
237 | You need to create an instance of MyModel and an instance of MyAnotherModel which refers to MyModel
238 | 
239 | ```python
240 | p = span_client.pipe()
241 | p = p.create('apps.models:MyModel', data={"a": "a"})
242 | p = p.translate({"fk": "id"}, defaults={"b": "b"})
243 | p = p.create('apps.models:MyAnotherModel')
244 | p.run()
245 | ```
246 | 
247 | In this example the `translate` task:
248 | - takes the result of the previous `create` task
249 | - extracts the value of the "id" field from it
250 | - adds this value to "defaults" under the key "fk"
251 | 
252 | After that, the next `create` task takes the result of `translate` as its input data.
253 | 
254 | ### Add/delete m2m relations
255 | 
256 | Let's take these models:
257 | 
258 | ```python
259 | class MyModel(models.Model):
260 |     str = models.CharField()
261 | 
262 | class MyManyToManyModel(models.Model):
263 |     m2m = models.ManyToManyField(MyModel, null=True)
264 | ```
265 | 
266 | Add a relation between existing objects
267 | 
268 | ```python
269 | my_models = span_client.create('apps.models:MyModel',
270 |                                [{'str': 'monty'}, {'str': 'python'}])
271 | m2m_model = span_client.create('apps.models:MyManyToManyModel',
272 |                                {'m2m': [my_models[0]['id']]})
273 | 
274 | # Will add 'python' to m2m_model.m2m where 'monty' already is
275 | data = {'mymodel': my_models[1]['id'], 'mymanytomanymodel': m2m_model['id']}
276 | through = span_client.create('apps.models:MyManyToManyModel.m2m.through', data)
277 | ```
278 | 
279 | And then delete some of the existing relations
280 | 
281 | ```python
282 | # The next `pipe` will eliminate all relations where `mymodel__str` equals 'monty'
283 | p = span_client.pipe()
284 | p = p.filter('apps.models:MyManyToManyModel.m2m.through', {'mymodel__str': 'monty'})
285 | p = p.delete('apps.models:MyManyToManyModel.m2m.through')
286 | p.run()
287 | ```
288 | 
289 | ## Run server instance
290 | 
291 | ```shell
292 | celery worker -A celery_rpc.app
293 | ```
294 | 
295 | Server with support for task consuming prioritization
296 | 
297 | ```shell
298 | celery multi start 2 -A celery_rpc.app -Q:1 celery_rpc.requests.high_priority
299 | ```
300 | 
301 | *Note, you must replace 'celery_rpc.requests' with the actual value of the config param __CELERY_DEFAULT_QUEUE__*
302 | 
303 | The command will start two instances. The first instance will consume from the high priority queue only. The second instance will serve both queues.
304 | 
305 | For daemonization see [Running the worker as a daemon](http://celery.readthedocs.org/en/latest/tutorials/daemonizing.html)
306 | 
307 | ## Run tests
308 | 
309 | ```shell
310 | python django-celery-rpc/celery_rpc/runtests/runtests.py
311 | ```
312 | 
313 | ## More Configuration
314 | 
315 | ### Overriding base task class
316 | 
317 | ```python
318 | OVERRIDE_BASE_TASKS = {
319 |     'ModelTask': 'package.module.MyModelTask',
320 |     'ModelChangeTask': 'package.module.MyModelChangeTask',
321 |     'FunctionTask': 'package.module.MyFunctionTask'
322 | }
323 | 
324 | 
325 | ```
326 | Supported class names: `ModelTask`, `ModelChangeTask`, `FunctionTask`
327 | 
328 | ### Handling remote exceptions individually
329 | 
330 | ```python
331 | # Both server and client
332 | CELERY_RPC_CONFIG['wrap_remote_errors'] = True
333 | ```
334 | 
335 | After enabling remote exception wrapping, the client will raise the same errors that happened
336 | on the server side.
337 | If the error class is not defined on the client side (i.e. the package is not installed),
338 | `Client.RemoteError` will be raised.
339 | Also, `Client.RemoteError` is a base class for all exceptions on the client side.
340 | 
341 | For unknown exceptions this code is valid:
342 | 
343 | ```python
344 | try:
345 |     result = rpc_client.call("remote_func")
346 | except rpc_client.errors.SomeUnknownError as e:
347 |     # here a stub for the remote SomeUnknownError is handled
348 |     print(e.args)
349 | ```
350 | 
351 | For known exceptions both variants work:
352 | 
353 | ```python
354 | 
355 | try:
356 |     result = rpc_client.call("remote_func")
357 | except rpc_client.errors.MultipleObjectsReturned as e:
358 |     # django.core.exceptions.MultipleObjectsReturned
359 |     handle_error(e)
360 | except django.core.exceptions.ObjectDoesNotExist as e:
361 |     # django.core.exceptions.ObjectDoesNotExist
362 |     handle_error(e)
363 | ```
364 | 
365 | If the original exception hierarchy is needed:
366 | 
367 | ```python
368 | 
369 | SomeBaseError = rpc_client.errors.SomeBaseError
370 | 
371 | DerivedError = rpc_client.errors.subclass(SomeBaseError, "DerivedError")
372 | ```
373 | 
374 | 
375 | ## TODO
376 | 
377 | - Set a default non-generic model serializer.
378 | - Test support for the RPC result backend from Celery.
379 | - Token auth and permissions support (like DRF).
380 | - Resource map and strict mode.
381 | - ...
382 | 
383 | ## Acknowledgements
384 | 
385 | Thanks to all who contributed to this project:
386 | - https://github.com/voron3x
387 | - https://github.com/dtrst
388 | - https://github.com/anatoliy-larin
389 | - https://github.com/bourivouh
390 | - https://github.com/tumb1er
391 | 
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 | 
3 | ## Supported Versions
4 | 
5 | We are migrating from supporting very old Python and Django versions to supporting only maintained dependencies.
6 | 
7 | Versions 2.x are intermediate versions that still support those ancient versions of Django, django-rest-framework and Python.
8 | 
9 | | Version | Supported          |
10 | | ------- | ------------------ |
11 | | 2.x     | :white_check_mark: |
12 | | 1.x     | :x:                |
13 | 
14 | ## Reporting a Vulnerability
15 | 
16 | Please file an issue in the GitHub tracker.
17 | 
18 | Maintainers will respond within a week.
19 | -------------------------------------------------------------------------------- /celery_rpc/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /celery_rpc/app.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import os 3 | 4 | from django.conf import settings 5 | 6 | from .utils import create_celery_app 7 | 8 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings') 9 | 10 | rpc = create_celery_app() 11 | rpc.autodiscover_tasks(['celery_rpc']) 12 | rpc.autodiscover_tasks(lambda: settings.INSTALLED_APPS, 13 | related_name="celery_rpc") 14 | -------------------------------------------------------------------------------- /celery_rpc/base.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import six 3 | from logging import getLogger 4 | 5 | import django 6 | from celery import Task 7 | from django.db.models import Model 8 | from django.db import transaction 9 | from rest_framework import serializers 10 | from rest_framework import VERSION 11 | 12 | from . import config 13 | from .utils import symbol_by_name, unproxy 14 | from .exceptions import RestFrameworkError, RemoteException 15 | 16 | logger = getLogger(__name__) 17 | 18 | DRF_VERSION = tuple(map(int, VERSION.split('.'))) 19 | 20 | DRF3 = DRF_VERSION >= (3, 0, 0) 21 | DRF34 = DRF_VERSION >= (3, 4, 0) 22 | 23 | 24 | class remote_error(object): 25 | """ Transforms all raised exceptions to a RemoteException wrapper, 26 | if enabled if CELERY_RPC_CONFIG['wrap_remote_errors']. 27 | 28 | Wrapper serializes exception args with result_serializer of rpc app. 29 | """ 30 | 31 | def __init__(self, task): 32 | self.task = task 33 | 34 | def __enter__(self): 35 | pass 36 | 37 | def __exit__(self, exc_type, exc_val, exc_tb): 38 | """ Unpacks exception from RemoteException wrapper, if enabled in 39 | celery_rpc config.""" 40 | if isinstance(exc_val, RemoteException): 41 | return 42 | if exc_val and self.task.app.conf['wrap_remote_errors']: 43 | serializer = self.task.app.conf['result_serializer'] 44 | raise RemoteException(exc_val, serializer) 45 | 46 | 47 | if DRF3: 48 | class GenericListSerializerClass(serializers.ListSerializer): 49 | 50 | def update(self, instance, validated_data): 51 | """ Performs bulk delete or update or create. 
52 | 53 | * instances are deleted if new data is empty 54 | * if lengths of instances and new date are equal, 55 | performs item-by-item update 56 | * performs bulk creation is no instances passed 57 | 58 | :returns new values 59 | """ 60 | if not validated_data: 61 | for obj in instance: 62 | obj.delete() 63 | return self.create(validated_data) 64 | if len(instance) == len(validated_data): 65 | for obj, values in zip(instance, validated_data): 66 | for k, v in values.items(): 67 | setattr(obj, k, v) 68 | obj.save() 69 | elif len(instance) == 0: 70 | return self.create(validated_data) 71 | else: 72 | raise RuntimeError("instance and data len differs, " 73 | "don't know what to do") 74 | return instance 75 | 76 | 77 | class RpcTask(Task): 78 | """ Base celery rpc task class 79 | """ 80 | 81 | @property 82 | def headers(self): 83 | return self.request.headers or {} 84 | 85 | def __call__(self, *args, **kwargs): 86 | with remote_error(self): 87 | self.prepare_context(*args, **kwargs) 88 | return self.run(*args, **kwargs) 89 | 90 | def prepare_context(self, *args, **kwargs): 91 | """ Prepare context for calling task function. Do nothing by default. 92 | """ 93 | 94 | 95 | class ModelTask(RpcTask): 96 | """ Base task for operating with django models. 97 | """ 98 | abstract = True 99 | 100 | def __call__(self, model, *args, **kwargs): 101 | logger.debug("Got task %s", self.name, 102 | extra={"referer": self.headers.get("referer"), 103 | "piped": self.headers.get("piped"), 104 | "model": model}) 105 | return super(ModelTask, self).__call__(model, *args, **kwargs) 106 | 107 | def prepare_context(self, model, *args, **kwargs): 108 | self.request.model = self._import_model(model) 109 | 110 | @staticmethod 111 | def _import_model(model_name): 112 | """ Import class by full name, check type and return. 113 | """ 114 | sym = symbol_by_name(model_name) 115 | if isinstance(sym, six.string_types): 116 | # perhaps model name is a value of 'sym' 117 | model_name = sym 118 | sym = symbol_by_name(model_name) 119 | elif not inspect.isclass(sym) and callable(sym): 120 | # perhaps model name is a result of call 'sym()' 121 | model_name = sym() 122 | sym = symbol_by_name(model_name) 123 | if issubclass(sym, Model): 124 | return sym 125 | raise TypeError( 126 | "Symbol '{}' is not a Django model".format(model_name)) 127 | 128 | @staticmethod 129 | def _import_serializer(serializer_name): 130 | """ Import class by full name, check type and return. 131 | """ 132 | sym = symbol_by_name(serializer_name) 133 | if inspect.isclass(sym) and issubclass(sym, 134 | serializers.ModelSerializer): 135 | return sym 136 | raise TypeError( 137 | "Symbol '{}' is not a DRF serializer".format(serializer_name)) 138 | 139 | @staticmethod 140 | def _create_queryset(model): 141 | """ Construct queryset by params. 142 | """ 143 | return model.objects.all() 144 | 145 | def _create_serializer_class(self, model_class): 146 | """ Return REST framework serializer class for model. 
147 | """ 148 | 149 | # default serializer 150 | base_serializer_class = serializers.ModelSerializer 151 | 152 | # custom serializer 153 | custom_serializer = self.request.kwargs.get('serializer_cls') 154 | if custom_serializer: 155 | base_serializer_class = self._import_serializer(custom_serializer) 156 | 157 | identity_field = self.identity_field 158 | 159 | # DRF >= 3.4 160 | base_serializer_fields = (getattr( 161 | getattr(base_serializer_class, 'Meta', None), 'fields', None)) 162 | 163 | class GenericModelSerializer(base_serializer_class): 164 | 165 | class Meta(getattr(base_serializer_class, 'Meta', object)): 166 | model = model_class 167 | 168 | if DRF3: 169 | # connect overriden list serializer to child serializer 170 | list_serializer_class = GenericListSerializerClass 171 | 172 | if DRF34: 173 | # implicit fields: DRF 3.4 - deprecated , DRF 3.5 - removed 174 | fields = base_serializer_fields or '__all__' 175 | 176 | def get_identity(self, data): 177 | try: 178 | return data.get(identity_field, data.get('pk', None)) 179 | except AttributeError: 180 | return None 181 | 182 | fields = self.request.kwargs.get("fields") 183 | if fields: 184 | GenericModelSerializer.Meta.fields = fields 185 | 186 | return GenericModelSerializer 187 | 188 | @property 189 | def serializer_class(self): 190 | return self._create_serializer_class(self.model) 191 | 192 | @property 193 | def model(self): 194 | return self.request.model 195 | 196 | @property 197 | def pk_name(self): 198 | return self.model._meta.pk.name 199 | 200 | @property 201 | def identity_field(self): 202 | """ Name of field which used as key-field 203 | """ 204 | return self.request.kwargs.get('identity') or self.pk_name 205 | 206 | @property 207 | def default_queryset(self): 208 | return self._create_queryset(self.model) 209 | 210 | 211 | class ModelChangeTask(ModelTask): 212 | """ Abstract task provides ability to changing model state. 213 | """ 214 | abstract = True 215 | 216 | def get_instance(self, data, using=None): 217 | """ Prepare instance (or several instances) to changes. 218 | 219 | :param data: data for changing model 220 | :param using: send query to specified DB alias 221 | :return: (Model instance or queryset, many flag) 222 | Many flag is True if queryset is returned. 223 | :raise self.model.DoesNotExist: if cannot find object in single mode 224 | 225 | """ 226 | identity_field = self.identity_field 227 | get_identity = lambda item: item.get(identity_field, item.get('pk')) 228 | qs = self.default_queryset 229 | if using: 230 | qs.using(using) 231 | if isinstance(data, dict): 232 | instance = qs.get(**{identity_field: get_identity(data)}) 233 | many = False 234 | else: 235 | identity_values = [get_identity(item) for item in data] 236 | instance = qs.filter(**{identity_field + '__in': identity_values}) 237 | many = True 238 | return instance, many 239 | 240 | def perform_changes(self, instance, data, many, allow_add_remove=False, 241 | partial=True, force_insert=False, force_update=False): 242 | """ Change model in accordance with params 243 | 244 | :param instance: one or several instances of model 245 | :param data: data for changing instances 246 | :param many: True if more than one instances will be changed 247 | :param allow_add_remove: True if need to create absent or delete missed 248 | instances. 
249 | :param partial: True if need partial update 250 | :return: serialized model data or list of one or errors 251 | 252 | """ 253 | kwargs = {'allow_add_remove': allow_add_remove} if not DRF3 else {} 254 | s = self.serializer_class(instance=instance, data=data, many=many, 255 | partial=partial, **kwargs) 256 | 257 | if s.is_valid(): 258 | if not DRF3: 259 | s.save(force_insert=force_insert, 260 | force_update=force_update) 261 | elif force_insert: 262 | s.instance = s.create(s.validated_data) 263 | elif force_update: 264 | s.update(s.instance, s.validated_data) 265 | else: 266 | s.save() 267 | return s.data 268 | else: 269 | # force ugettext_lazy to unproxy 270 | errors = unproxy(s.errors) 271 | raise RestFrameworkError('Serializer errors happened', errors) 272 | 273 | 274 | class FunctionTask(RpcTask): 275 | """ Base task for calling function. 276 | """ 277 | abstract = True 278 | 279 | def __call__(self, function, *args, **kwargs): 280 | logger.debug("Got task %s", self.name, 281 | extra={"referer": self.headers.get("referer"), 282 | "piped": self.headers.get("piped"), 283 | "function": function}) 284 | return super(FunctionTask, self).__call__(function, *args, **kwargs) 285 | 286 | def prepare_context(self, function, *args, **kwargs): 287 | self.request.function = self._import_function(function) 288 | 289 | @staticmethod 290 | def _import_function(func_name): 291 | """ Import class by full name, check type and return. 292 | """ 293 | sym = symbol_by_name(func_name) 294 | if hasattr(sym, '__call__'): 295 | return sym 296 | raise TypeError("Symbol '{}' is not a function".format(func_name)) 297 | 298 | @property 299 | def function(self): 300 | return self.request.function 301 | 302 | 303 | class PipeTask(RpcTask): 304 | """ Base Task for pipe function. 305 | """ 306 | 307 | def __call__(self, *args, **kwargs): 308 | logger.debug("Got task %s", self.name, 309 | extra={"referer": self.headers.get("referer")}) 310 | return super(PipeTask, self).__call__(*args, **kwargs) 311 | 312 | 313 | def get_base_task_class(base_task_name): 314 | """ Provide base task for actual tasks 315 | 316 | Load custom base task if overrides are in config or return default base task. 317 | 318 | :param base_task_name: name of default base task in this module 319 | :return: base celery task class 320 | """ 321 | base_task = globals().get(base_task_name) 322 | custom_task_name = config.override_base_tasks.get(base_task_name) 323 | if not custom_task_name: 324 | return base_task 325 | sym = symbol_by_name(custom_task_name) 326 | if inspect.isclass(sym) and issubclass(sym, base_task): 327 | return sym 328 | raise TypeError( 329 | "Symbol '{}' has not a base ".format(custom_task_name, 330 | base_task.__name__)) 331 | 332 | 333 | def atomic_commit_on_success(): 334 | """ Select context manager for atomic database operations depending on 335 | Django version. 
336 | """ 337 | ver = django.VERSION 338 | if ver[0] == 1 and ver[1] < 6: 339 | return transaction.commit_on_success 340 | elif (ver[0] == 1 and ver[1] >= 6) or ver[0] >= 2: 341 | return transaction.atomic 342 | else: 343 | raise RuntimeError('Invalid Django version: {}'.format(ver)) 344 | 345 | 346 | atomic_commit_on_success = atomic_commit_on_success() 347 | -------------------------------------------------------------------------------- /celery_rpc/client.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import os 4 | import socket 5 | import warnings 6 | 7 | from celery.exceptions import TimeoutError 8 | from celery.utils import nodename 9 | 10 | from . import utils 11 | from .config import get_result_timeout 12 | from .exceptions import RestFrameworkError, remote_exception_registry 13 | 14 | TEST_MODE = bool(os.environ.get('CELERY_RPC_TEST_MODE', False)) 15 | 16 | 17 | def _async_to_nowait(nowait=False, **kwargs): 18 | if 'async' in kwargs: 19 | warnings.warn("async parameter name is deprecated for python3.5+") 20 | nowait = kwargs.pop('async') 21 | return nowait 22 | 23 | 24 | class Client(object): 25 | """ Sending requests to server and translating results 26 | """ 27 | 28 | class Error(Exception): 29 | """ Base client error 30 | """ 31 | 32 | class InvalidRequest(Error): 33 | """ Request contains invalid params or some params are missed 34 | """ 35 | 36 | class RequestError(Error): 37 | """ Error of sending request 38 | """ 39 | 40 | class ResponseError(Error): 41 | """ Error of getting result 42 | """ 43 | 44 | class TimeoutError(Error): 45 | """ Timeout while getting result 46 | """ 47 | 48 | _app = None 49 | _task_stubs = None 50 | 51 | def __init__(self, app_config=None): 52 | """ Adjust server interaction parameters 53 | 54 | :param app_config: alternative configuration parameters for Celery app. 
55 | 56 | """ 57 | self._app = utils.create_celery_app(config=app_config) 58 | if TEST_MODE: 59 | # XXX Working ONLY while tests running 60 | from .app import rpc 61 | self._task_stubs = rpc.tasks 62 | else: 63 | self._task_stubs = self._register_stub_tasks(self._app) 64 | 65 | self.errors = remote_exception_registry 66 | 67 | def get_client_name(self): 68 | return nodename(self._app.conf.get("rpc_client_name"), 69 | socket.gethostname()) 70 | 71 | def prepare_task(self, task_name, args, kwargs, high_priority=False, 72 | **options): 73 | """ Prepare subtask signature 74 | 75 | :param task_name: task name like 'celery_rpc.filter' which exists 76 | in `_task_stubs` 77 | :param kwargs: optional parameters of request 78 | :param args: optional parameters of request 79 | :param high_priority: ability to speedup consuming of the task 80 | if server support prioritization, by default False 81 | :param options: optional parameter of apply_async 82 | :return: celery.canvas.Signature instance 83 | 84 | """ 85 | task = self._task_stubs[task_name] 86 | options.setdefault("headers", {}) 87 | options["headers"]["referer"] = self.get_client_name() 88 | if high_priority: 89 | conf = task.app.conf 90 | options['routing_key'] = conf['task_high_priority_routing_key'] 91 | return task.subtask(args=args, kwargs=kwargs, **options) 92 | 93 | def filter(self, model, kwargs=None, nowait=False, timeout=None, retries=1, 94 | high_priority=False, **options): 95 | """ Call filtering Django model objects on server 96 | 97 | :param model: full name of model symbol like 'package.module:Class' 98 | :param nowait: enables delayed collecting of result 99 | :param timeout: timeout of waiting for results 100 | :param retries: number of tries to send request 101 | :param high_priority: ability to speedup consuming of the task 102 | if server support prioritization, by default False 103 | :param kwargs: optional parameters of request 104 | filters - dict of terms compatible with django database query 105 | offset - offset from which return a results 106 | limit - max number of results 107 | fields - list of serializer fields, which will be returned 108 | exclude - lookups for excluding matched models 109 | order_by - order of results (list, tuple or string), 110 | minus ('-') set reverse order, default = [] 111 | filters_Q - django Q-object for filtering models 112 | exclude_Q - django Q-object for excluding matched models 113 | 114 | :param options: optional parameter of apply_async 115 | :return: list of filtered objects or AsyncResult if nowait is True 116 | :raise: see get_result() 117 | 118 | """ 119 | nowait = _async_to_nowait(nowait, **options) 120 | args = (model, ) 121 | signature = self.prepare_task(utils.FILTER_TASK_NAME, args, kwargs, 122 | high_priority=high_priority, **options) 123 | return self.send_request(signature, nowait, timeout, retries) 124 | 125 | def update(self, model, data, kwargs=None, nowait=False, timeout=None, 126 | retries=1, high_priority=False, **options): 127 | """ Call update Django model objects on server 128 | 129 | :param model: full name of model symbol like 'package.module:Class' 130 | :param data: dict with new data or list of them 131 | :param kwargs: optional parameters of request (dict) 132 | :param nowait: enables delayed collecting of result 133 | :param timeout: timeout of waiting for results 134 | :param retries: number of tries to send request 135 | :param high_priority: ability to speedup consuming of the task 136 | if server support prioritization, by default False 137 | :param 
options: optional parameter of apply_async 138 | :return: dict with updated state of model or list of them or 139 | AsyncResult if nowait is True 140 | :raise InvalidRequest: if data has non iterable type 141 | 142 | """ 143 | if not hasattr(data, '__iter__'): 144 | raise self.InvalidRequest("Parameter 'data' must be a dict or list") 145 | nowait = _async_to_nowait(nowait, **options) 146 | args = (model, data) 147 | signature = self.prepare_task(utils.UPDATE_TASK_NAME, args, kwargs, 148 | high_priority=high_priority, **options) 149 | return self.send_request(signature, nowait, timeout, retries) 150 | 151 | def getset(self, model, data, kwargs=None, nowait=False, timeout=None, 152 | retries=1, high_priority=False, **options): 153 | """ Call update Django model objects on server and return previous state 154 | 155 | :param model: full name of model symbol like 'package.module:Class' 156 | :param data: dict with new data or list of them 157 | :param kwargs: optional parameters of request (dict) 158 | :param nowait: enables delayed collecting of result 159 | :param timeout: timeout of waiting for results 160 | :param retries: number of tries to send request 161 | :param high_priority: ability to speedup consuming of the task 162 | if server support prioritization, by default False 163 | :param options: optional parameter of apply_async 164 | :return: dict with old state of model or list of them or 165 | AsyncResult if nowait is True 166 | :raise InvalidRequest: if data has non iterable type 167 | 168 | """ 169 | if not hasattr(data, '__iter__'): 170 | raise self.InvalidRequest("Parameter 'data' must be a dict or list") 171 | nowait = _async_to_nowait(nowait, **options) 172 | args = (model, data) 173 | signature = self.prepare_task(utils.GETSET_TASK_NAME, args, kwargs, 174 | high_priority=high_priority, **options) 175 | return self.send_request(signature, nowait, timeout, retries) 176 | 177 | def update_or_create(self, model, data, kwargs=None, nowait=False, 178 | timeout=None, retries=1, high_priority=False, **options): 179 | """ Call update Django model objects on server. If there is not for some 180 | data, then a new object will be created. 181 | 182 | :param model: full name of model symbol like 'package.module:Class' 183 | :param data: dict with new data or list of them 184 | :param kwargs: optional parameters of request (dict) 185 | :param nowait: enables delayed collecting of result 186 | :param timeout: timeout of waiting for results 187 | :param retries: number of tries to send request 188 | :param high_priority: ability to speedup consuming of the task 189 | if server support prioritization, by default False 190 | :param options: optional parameter of apply_async 191 | :return: dict with updated state of model or list of them or 192 | AsyncResult if nowait is True 193 | :raise InvalidRequest: if data has non iterable type 194 | 195 | """ 196 | if not hasattr(data, '__iter__'): 197 | raise self.InvalidRequest("Parameter 'data' must be a dict or list") 198 | args = (model, data) 199 | nowait = _async_to_nowait(nowait, **options) 200 | signature = self.prepare_task( 201 | utils.UPDATE_OR_CREATE_TASK_NAME, args, kwargs, 202 | high_priority=high_priority, **options) 203 | return self.send_request(signature, nowait, timeout, retries) 204 | 205 | def create(self, model, data, kwargs=None, nowait=False, timeout=None, 206 | retries=1, high_priority=False, **options): 207 | """ Call create Django model objects on server. 
208 | 209 | :param model: full name of model symbol like 'package.module:Class' 210 | :param data: dict with new data or list of them 211 | :param kwargs: optional parameters of request (dict) 212 | :param nowait: enables delayed collecting of result 213 | :param timeout: timeout of waiting for results 214 | :param retries: number of tries to send request 215 | :param high_priority: ability to speedup consuming of the task 216 | if server support prioritization, by default False 217 | :param options: optional parameter of apply_async 218 | :return: dict with updated state of model or list of them or 219 | AsyncResult if nowait is True 220 | :raise InvalidRequest: if data has non iterable type 221 | 222 | """ 223 | if not hasattr(data, '__iter__'): 224 | raise self.InvalidRequest("Parameter 'data' must be a dict or list") 225 | nowait = _async_to_nowait(nowait, **options) 226 | args = (model, data) 227 | signature = self.prepare_task( 228 | utils.CREATE_TASK_NAME, args, kwargs, high_priority=high_priority, 229 | **options) 230 | return self.send_request(signature, nowait, timeout, retries) 231 | 232 | def delete(self, model, data, kwargs=None, nowait=False, timeout=None, 233 | retries=1, high_priority=False, **options): 234 | """ Call delete Django model objects on server. 235 | 236 | :param model: full name of model symbol like 'package.module:Class' 237 | :param data: dict (or list with dicts), which can contains ID 238 | :param kwargs: optional parameters of request (dict) 239 | :param nowait: enables delayed collecting of result 240 | :param timeout: timeout of waiting for results 241 | :param retries: number of tries to send request 242 | :param high_priority: ability to speedup consuming of the task 243 | if server support prioritization, by default False 244 | :param options: optional parameter of apply_async 245 | :return: None or [] if multiple delete or AsyncResult if nowait is True 246 | :raise InvalidRequest: if data has non iterable type 247 | 248 | """ 249 | if not hasattr(data, '__iter__'): 250 | raise self.InvalidRequest("Parameter 'data' must be a dict or list") 251 | args = (model, data) 252 | nowait = _async_to_nowait(nowait, **options) 253 | signature = self.prepare_task(utils.DELETE_TASK_NAME, args, kwargs, 254 | high_priority=high_priority, **options) 255 | return self.send_request(signature, nowait, timeout, retries) 256 | 257 | def call(self, function, args=None, kwargs=None, nowait=False, timeout=None, 258 | retries=1, high_priority=False, **options): 259 | """ Call function on server 260 | 261 | :param function: full name of model symbol like 'package.module:Class' 262 | :param args: list with positional parameters of function 263 | :param kwargs: dict with named parameters of function 264 | :param nowait: enables delayed collecting of result 265 | :param timeout: timeout of waiting for results 266 | :param retries: number of tries to send request 267 | :param high_priority: ability to speedup consuming of the task 268 | if server support prioritization, by default False 269 | :param options: optional parameter of apply_async 270 | :return: result of function call or AsyncResult if nowait is True 271 | :raise InvalidRequest: if data has non iterable type 272 | 273 | """ 274 | args = (function, args, kwargs) 275 | nowait = _async_to_nowait(nowait, **options) 276 | signature = self.prepare_task(utils.CALL_TASK_NAME, args, None, 277 | high_priority=high_priority, **options) 278 | return self.send_request(signature, nowait, timeout, retries) 279 | 280 | def 
get_result(self, async_result, timeout=None, **options): 281 | """ Collect results from delayed result object 282 | 283 | :param async_result: Celery AsyncResult object 284 | :param timeout: timeout of waiting for results 285 | :return: results or exception if something goes wrong 286 | :raise RestFrameworkError: error in the middle of Django REST 287 | Framework at server (only is serializer is pickle or yaml) 288 | :raise Client.ResponseError: something goes wrong 289 | 290 | """ 291 | timeout = timeout or get_result_timeout 292 | 293 | try: 294 | return async_result.get(timeout=timeout, **options) 295 | except TimeoutError: 296 | raise self.TimeoutError('Timeout exceeded while waiting for results') 297 | except RestFrameworkError: 298 | # !!! Not working with JSON serializer 299 | raise 300 | except Exception as e: 301 | exc = self._unpack_exception(e) 302 | if not exc: 303 | exc = self.ResponseError( 304 | 'Something goes wrong while getting results', e) 305 | raise exc 306 | 307 | def _unpack_exception(self, error): 308 | wrap_errors = self._app.conf['wrap_remote_errors'] 309 | serializer = self._app.conf['result_serializer'] 310 | return utils.unpack_exception(error, wrap_errors, serializer=serializer) 311 | 312 | def pipe(self): 313 | """ Create pipeline for RPC request 314 | :return: Instance of Pipe 315 | """ 316 | return Pipe(self) 317 | 318 | def send_request(self, signature, nowait=False, timeout=None, retries=1, 319 | **kwargs): 320 | """ Sending request to a server 321 | 322 | :param signature: Celery signature instance 323 | :param nowait: enables delayed collecting of result 324 | :param timeout: timeout of waiting for results 325 | :param retries: number of tries to send request 326 | :param kwargs: compatibility parameters for async keyword argument 327 | :return: results or AsyncResult if nowait is True or 328 | exception if something goes wrong 329 | :raise RestFrameworkError: error in the middle of Django REST 330 | Framework at server (if nowait=False). 331 | :raise Client.ResponseError: something goes wrong (if nowait=False) 332 | 333 | """ 334 | expires = timeout or get_result_timeout 335 | nowait = _async_to_nowait(nowait, **kwargs) 336 | while True: 337 | # noinspection PyBroadException 338 | try: 339 | try: 340 | r = signature.apply_async(expires=expires) 341 | except Exception as e: 342 | raise self.RequestError( 343 | 'Something goes wrong while sending request', e) 344 | if nowait: 345 | return r 346 | else: 347 | return self.get_result(r, timeout) 348 | except Exception: 349 | retries -= 1 350 | if retries <= 0: 351 | raise 352 | 353 | @classmethod 354 | def _register_stub_tasks(cls, app): 355 | """ Bind fake tasks to the app 356 | 357 | :param app: celery application 358 | :return: dict {task_name: task_stub) 359 | 360 | """ 361 | tasks = {} 362 | for name in utils.TASK_NAME_MAP.values(): 363 | # noinspection PyUnusedLocal 364 | @app.task(bind=True, name=name, shared=False) 365 | def task_stub(*args, **kwargs): 366 | pass 367 | tasks[name] = task_stub 368 | return tasks 369 | 370 | 371 | class Pipe(object): 372 | """ Builder of pipeline of RPC requests. 
373 | """ 374 | 375 | def __init__(self, client): 376 | if not client: 377 | raise ValueError("Rpc client is required for Pipe() constructing") 378 | self.client = client 379 | self._pipeline = [] 380 | 381 | def _clone(self): 382 | p = Pipe(self.client) 383 | p._pipeline = self._pipeline[:] 384 | return p 385 | 386 | def _push(self, task): 387 | p = self._clone() 388 | p._pipeline.append(task) 389 | return p 390 | 391 | def run(self, nowait=False, timeout=None, retries=1, high_priority=False, 392 | **options): 393 | """ Run pipeline - send chain of RPC request to server. 394 | :return: list of result of each chained request. 395 | """ 396 | task_name = utils.PIPE_TASK_NAME 397 | nowait = _async_to_nowait(nowait, **options) 398 | signature = self.client.prepare_task( 399 | task_name, (self._pipeline,), None, high_priority=high_priority, 400 | **options) 401 | return self.client.send_request(signature, nowait, timeout, retries) 402 | 403 | @staticmethod 404 | def _prepare_task(task_name, args, kwargs, options=None): 405 | return dict(name=task_name, args=args, kwargs=kwargs, 406 | options=options or {}) 407 | 408 | def filter(self, model, kwargs=None): 409 | task = self._prepare_task(utils.FILTER_TASK_NAME, (model, ), 410 | kwargs) 411 | return self._push(task) 412 | 413 | def delete(self, model, data=None, kwargs=None): 414 | """ Delete models identified by `data` or by result of previous request. 415 | 416 | If `data` missed acts as transformer accepted on data from output of 417 | previous task. 418 | 419 | :param model: full name of model symbol like 'package.module:Class' 420 | :param data: dict (or list with dicts), which can contains ID 421 | :param kwargs: 422 | :return: 423 | """ 424 | task = self._prepare_model_change_task(utils.DELETE_TASK_NAME, model, 425 | data, kwargs) 426 | return self._push(task) 427 | 428 | def update(self, model, data=None, kwargs=None): 429 | if data and not hasattr(data, '__iter__'): 430 | raise self.client.InvalidRequest( 431 | "Parameter 'data' must be a dict or list") 432 | 433 | task = self._prepare_model_change_task(utils.UPDATE_TASK_NAME, model, 434 | data, kwargs) 435 | return self._push(task) 436 | 437 | def update_or_create(self, model, data=None, kwargs=None): 438 | if data and not hasattr(data, '__iter__'): 439 | raise self.client.InvalidRequest( 440 | "Parameter 'data' must be a dict or list") 441 | 442 | task = self._prepare_model_change_task(utils.UPDATE_OR_CREATE_TASK_NAME, 443 | model, data, kwargs) 444 | return self._push(task) 445 | 446 | def getset(self, model, data=None, kwargs=None): 447 | if data and not hasattr(data, '__iter__'): 448 | raise self.client.InvalidRequest( 449 | "Parameter 'data' must be a dict or list") 450 | 451 | task = self._prepare_model_change_task(utils.GETSET_TASK_NAME, model, 452 | data, kwargs) 453 | return self._push(task) 454 | 455 | def create(self, model, data=None, kwargs=None): 456 | if data and not hasattr(data, '__iter__'): 457 | raise self.client.InvalidRequest( 458 | "Parameter 'data' must be a dict or list") 459 | 460 | task = self._prepare_model_change_task(utils.CREATE_TASK_NAME, model, 461 | data, kwargs) 462 | return self._push(task) 463 | 464 | def call(self, function, args, kwargs): 465 | args = (function, args) 466 | task = self._prepare_task(utils.CALL_TASK_NAME, args, kwargs) 467 | return self._push(task) 468 | 469 | def translate(self, mapping, kwargs=None): 470 | args = (mapping,) 471 | options = {'transformer': True} 472 | 473 | task = self._prepare_task(utils.TRANSLATE_TASK_NAME, args, 
474 | kwargs, options) 475 | return self._push(task) 476 | 477 | def _prepare_model_change_task(self, task_name, model, data=None, 478 | kwargs=None): 479 | args = [model] 480 | options = {} 481 | if data: 482 | args.append(data) 483 | else: 484 | options['transformer'] = True 485 | 486 | return self._prepare_task(task_name, args, kwargs, options) 487 | 488 | def result(self, index, kwargs=None): 489 | args = (index,) 490 | options = {'transformer': True} 491 | 492 | task = self._prepare_task(utils.RESULT_TASK_NAME, args, 493 | kwargs, options) 494 | return self._push(task) 495 | 496 | 497 | # Copy task names into client class from utils 498 | for n, v in utils.TASK_NAME_MAP.items(): 499 | setattr(Client, n, v) 500 | -------------------------------------------------------------------------------- /celery_rpc/codecs.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import decimal 3 | import json 4 | import re 5 | import uuid 6 | 7 | import six 8 | import jsonpickle 9 | from kombu.serialization import registry 10 | 11 | try: 12 | # Django support 13 | from django.utils.functional import Promise # noqa 14 | from django.utils.encoding import smart_str # noqa 15 | from django.db.models import Q # noqa 16 | has_django = True 17 | except ImportError: 18 | has_django = False 19 | 20 | 21 | class RpcJsonEncoder(json.JSONEncoder): 22 | """ 23 | JSONEncoder subclass that knows how to encode date/time/timedelta, 24 | decimal types, Q-objects and generators. 25 | 26 | Originated from 27 | https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/utils/encoders.py 28 | 29 | """ 30 | 31 | def _default(self, o): 32 | # For Date Time string spec, see ECMA 262 33 | # http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15 34 | if isinstance(o, datetime.datetime): 35 | r = o.isoformat() 36 | if o.microsecond: 37 | r = r[:23] + r[26:] 38 | if r.endswith('+00:00'): 39 | r = r[:-6] + 'Z' 40 | return r 41 | elif isinstance(o, datetime.date): 42 | return o.isoformat() 43 | elif isinstance(o, datetime.time): 44 | r = o.isoformat() 45 | if o.microsecond: 46 | r = r[:12] 47 | return r 48 | elif isinstance(o, datetime.timedelta): 49 | return str(o.total_seconds()) 50 | elif isinstance(o, decimal.Decimal): 51 | return str(o) 52 | elif isinstance(o, uuid.UUID): 53 | return o.hex 54 | elif hasattr(o, 'tolist'): 55 | return o.tolist() 56 | elif hasattr(o, '__iter__'): 57 | return [i for i in o] 58 | return super(RpcJsonEncoder, self).default(o) 59 | 60 | if has_django: 61 | # Handling django-specific classes only if django package is installed 62 | def default(self, o): 63 | if isinstance(o, Promise): 64 | return smart_str(o) 65 | elif isinstance(o, Q): 66 | return jsonpickle.encode(o) 67 | else: 68 | return self._default(o) 69 | else: 70 | default = _default 71 | 72 | 73 | class XJsonEncoder(RpcJsonEncoder): 74 | """ Backward compatibility for task serializing. 75 | """ 76 | 77 | if has_django: 78 | def default(self, o): 79 | if isinstance(o, Q): 80 | raise RuntimeError("Django Q-objects does not supported by " 81 | "'x-json' codec. 
For running with Q-objects " 82 | "use celery_rpc>0.16 on both sides and " 83 | "set 'x-rpc-json' as task serializer for " 84 | "client") 85 | return super(XJsonEncoder, self).default(o) 86 | 87 | 88 | class RpcJsonDecoder(json.JSONDecoder): 89 | """ Add support for Django Q-objects in dicts 90 | """ 91 | Q_OBJECT_SIGNATURE = re.compile( 92 | r'"py/object": "django\.db\.models\.query_utils\.Q"') 93 | 94 | def __init__(self, *args, **kwargs): 95 | kwargs['object_hook'] = self._object_hook 96 | super(RpcJsonDecoder, self).__init__(*args, **kwargs) 97 | 98 | def _object_hook(self, val): 99 | """ Iterate through dict for additional conversion. 100 | """ 101 | 102 | for k, v in six.iteritems(val): 103 | if (isinstance(v, six.string_types) and re.search( 104 | self.Q_OBJECT_SIGNATURE, v)): 105 | val[k] = jsonpickle.decode(v) 106 | return val 107 | 108 | 109 | def x_rpc_json_dumps(obj): 110 | return json.dumps(obj, cls=RpcJsonEncoder) 111 | 112 | 113 | def x_rpc_json_loads(s): 114 | if isinstance(s, six.binary_type): 115 | s = s.decode() 116 | return json.loads(s, cls=RpcJsonDecoder) 117 | 118 | 119 | # XXX: Compatibility for versions <= 0.16 120 | def x_json_dumps(obj): 121 | return json.dumps(obj, cls=XJsonEncoder) 122 | 123 | 124 | # XXX: Compatibility for versions <= 0.16 125 | def x_json_loads(s): 126 | if isinstance(s, six.binary_type): 127 | s = s.decode() 128 | return json.loads(s) 129 | 130 | 131 | def register_codecs(): 132 | registry.register('x-rpc-json', x_rpc_json_dumps, x_rpc_json_loads, 133 | 'application/json+celery-rpc:v1', 'utf-8') 134 | # XXX: Compatibility for ver <= 0.16 135 | registry.register('x-json', x_json_dumps, x_json_loads, 136 | 'application/json', 'utf-8') 137 | -------------------------------------------------------------------------------- /celery_rpc/config.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import absolute_import 3 | 4 | try: 5 | from django.conf import settings as _settings 6 | except ImportError: 7 | # No need django for celery_rpc client 8 | _settings = object() 9 | 10 | # Default limit for results of filter call 11 | filter_limit = 1000 12 | 13 | # Default timeout for getting results 14 | get_result_timeout = 10 15 | 16 | # Pass exceptions from server to client as instances if true. 17 | # By default exceptions are passed as a string. 18 | wrap_remote_errors = False 19 | 20 | # Provide ability to change base task class for celery-rpc server tasks. 21 | # Example: { 'ModelChangeTask': my.own.ModelChangeTask } 22 | # Key - symbolic class name, value - class with suitable interface. 23 | # Do it on your own risk! 
24 | override_base_tasks = {} 25 | 26 | # default celery rpc client name which will be passed as referer header 27 | rpc_client_name = "celery_rpc_client" 28 | 29 | # See Celery configuration parameters at 30 | # http://docs.celeryproject.org/en/latest/configuration.html 31 | # Some reasonable defaults are defined below 32 | 33 | result_backend = 'cache+memory://' 34 | 35 | task_default_queue = 'celery_rpc.requests' 36 | task_default_exchange = 'celery_rpc' 37 | task_default_routing_key = 'celery_rpc' 38 | 39 | # Do not let skip messages silently (RabbitMQ) 40 | broker_transport_options = {'confirm_publish': True} 41 | 42 | task_acks_late = True 43 | accept_content = ['json', 'x-json', 'x-rpc-json'] 44 | task_serializer = 'x-json' 45 | result_serializer = 'x-json' 46 | 47 | # Options can be overridden by CELERY_RPC_CONFIG dict in Django settings.py 48 | _CONFIG = getattr(_settings, 'CELERY_RPC_CONFIG', {}) 49 | 50 | locals().update(_CONFIG) 51 | 52 | task_soft_time_limit = get_result_timeout + 1 53 | task_time_limit = get_result_timeout * 2 54 | 55 | _codecs_registered = False 56 | if not _codecs_registered: 57 | from .codecs import register_codecs 58 | 59 | register_codecs() 60 | _codecs_registered = True 61 | -------------------------------------------------------------------------------- /celery_rpc/exceptions.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | import six 3 | from celery.backends.base import create_exception_cls 4 | from kombu.exceptions import ContentDisallowed 5 | from kombu.serialization import dumps, loads, registry 6 | from kombu.utils.encoding import from_utf8 7 | 8 | from celery_rpc.utils import symbol_by_name, DEFAULT_EXC_SERIALIZER 9 | 10 | 11 | class ModelTaskError(Exception): 12 | """ Base model tasks exception class 13 | """ 14 | 15 | 16 | class RestFrameworkError(ModelTaskError): 17 | """ REST framework encountered with problems while handling request 18 | """ 19 | 20 | 21 | class RemoteException(Exception): 22 | """ Wrapper for remote exceptions.""" 23 | 24 | def __init__(self, exc, serializer=DEFAULT_EXC_SERIALIZER): 25 | """ 26 | :param exc: Exception instance or RemoteException.args 27 | :type exc: BaseException subclass, list or tuple 28 | :param serializer: CELERY_RESULT_SERIALIZER for celery_rpc app 29 | :type serializer: str 30 | """ 31 | if isinstance(exc, BaseException): 32 | cls = exc.__class__ 33 | exc_args = exc.args 34 | args = (cls.__module__, cls.__name__, exc_args) 35 | args = [dumps(args, serializer=serializer)[2]] 36 | elif isinstance(exc, (list, tuple)): 37 | args = exc 38 | elif isinstance(exc, six.string_types): 39 | args = [exc] 40 | else: 41 | raise ValueError("Need a BaseException object") 42 | super(RemoteException, self).__init__(*args) 43 | 44 | def unpack_exception(self, serializer): 45 | return remote_exception_registry.unpack_exception( 46 | self.args[0], serializer) 47 | 48 | 49 | class RemoteExceptionRegistry(object): 50 | """ remote exception stub registry 51 | 52 | Allows to instantiate or acquire remote exception stubs for using on the 53 | client side. 
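    Illustrative sketch (the function path is hypothetical), modelled on
    celery_rpc/tests/test_errors.py and assuming `wrap_remote_errors` is
    enabled on both sides: exception stubs are exposed on the client as
    `client.errors.<Name>` and subclass both the original exception type
    and RemoteError:

        from celery_rpc.client import Client

        client = Client()
        try:
            client.call('myapp.utils.may_fail')
        except client.errors.ValueError as exc:
            print(exc.args)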
54 | """ 55 | 56 | class RemoteError(Exception): 57 | """ Parent class for all remote exception stubs.""" 58 | 59 | def __init__(self): 60 | self.__registry = {} 61 | 62 | def unpack_exception(self, data, serializer): 63 | """ Instantiates exception stub for original exception 64 | 65 | :param module: module name for original exception 66 | :param name: class name for original exception 67 | :param args: RemoteException.args 68 | :return: new constructed exception 69 | :rtype: self.RemoteError subclass 70 | """ 71 | try: 72 | # unpacking RemoteException args 73 | content_type, content_encoding, dumps = registry._encoders[serializer] 74 | 75 | data = loads(data, content_type, content_encoding) 76 | module, name, args = data 77 | try: 78 | # trying to import original exception 79 | original = symbol_by_name("%s.%s" % (module, name)) 80 | # creating parent class for original error and self.RemoteError 81 | 82 | class_name = from_utf8("Remote" + name) 83 | parent = type(class_name, (original, self.RemoteError), 84 | {'__module__': module}) 85 | except (AttributeError, ImportError): 86 | # alternative way for unknown errors 87 | parent = self.RemoteError 88 | 89 | # create and cache exception stub class 90 | if name not in self.__registry: 91 | self.__registry[name] = create_exception_cls( 92 | from_utf8(name), module, parent=parent) 93 | exc_class = self.__registry[name] 94 | 95 | return exc_class(*args) 96 | except (ValueError, ContentDisallowed): 97 | # loads error 98 | return None 99 | 100 | def __getattr__(self, item): 101 | """ creates exception stub class for all missing attributes. 102 | """ 103 | try: 104 | return object.__getattribute__(self, item) 105 | except AttributeError: 106 | if item not in self.__registry: 107 | exception = create_exception_cls(item, "celery_rpc.exceptions", 108 | parent=self.RemoteError) 109 | self.__registry[item] = exception 110 | return self.__registry[item] 111 | 112 | def subclass(self, parent, name): 113 | """ creates exception stub class with custom parent exception.""" 114 | if name not in self.__registry: 115 | exception = create_exception_cls(name, "celery_rpc.exceptions", 116 | parent=parent) 117 | self.__registry[name] = exception 118 | return self.__registry[name] 119 | 120 | def flush(self): 121 | self.__registry = {} 122 | 123 | # Global remote exception registry 124 | remote_exception_registry = RemoteExceptionRegistry() 125 | -------------------------------------------------------------------------------- /celery_rpc/models.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /celery_rpc/runtests/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /celery_rpc/runtests/runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/ 4 | # http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/ 5 | # http://code.djangoproject.com/svn/django/trunk/tests/runtests.py 6 | import os 7 | import sys 8 | 9 | # fix sys path so we don't need to setup PYTHONPATH 10 | sys.path.append(os.path.join(os.path.dirname(__file__), "../..")) 11 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'celery_rpc.runtests.settings') 
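# NOTE: DJANGO_SETTINGS_MODULE has to be set before django.setup() /
# get_runner() below touch django.conf.settings.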
12 | 13 | import django 14 | from django.conf import settings 15 | from django.test.utils import get_runner 16 | 17 | 18 | def usage(): 19 | return """ 20 | Usage: python runtests.py [UnitTestClass].[method] 21 | 22 | You can pass the Class name of the `UnitTestClass` you want to test. 23 | 24 | Append a method name if you only want to test a specific method of that class. 25 | """ 26 | 27 | 28 | def main(): 29 | 30 | if len(sys.argv) == 2: 31 | test_case = '.' + sys.argv[1] 32 | elif len(sys.argv) == 1: 33 | test_case = '' 34 | else: 35 | print(usage()) 36 | sys.exit(1) 37 | 38 | test_module_name = 'celery_rpc.tests' 39 | if django.VERSION[0] == 1 and django.VERSION[1] < 6: 40 | test_module_name = 'tests' 41 | 42 | if django.VERSION >= (1, 7): 43 | # New Apps loading mechanism 44 | django.setup() 45 | 46 | TestRunner = get_runner(settings) 47 | test_runner = TestRunner() 48 | 49 | failures = test_runner.run_tests([test_module_name + test_case]) 50 | 51 | sys.exit(failures) 52 | 53 | if __name__ == '__main__': 54 | main() -------------------------------------------------------------------------------- /celery_rpc/runtests/settings.py: -------------------------------------------------------------------------------- 1 | from uuid import uuid4 2 | 3 | DEBUG = True 4 | TEMPLATE_DEBUG = DEBUG 5 | 6 | INSTALLED_APPS = [ 7 | 'django.contrib.contenttypes', 8 | 'celery_rpc.runtests', 9 | 'celery_rpc.tests' 10 | ] 11 | 12 | DATABASE_ENGINE = 'django.db.backends.sqlite3', 13 | DATABASES = { 14 | 'default': { 15 | 'ENGINE': 'django.db.backends.sqlite3', 16 | 'NAME': ':memory:' 17 | } 18 | } 19 | 20 | SECRET_KEY = str(uuid4()) 21 | 22 | CELERY_RPC_CONFIG = { 23 | 'task_always_eager': True, 24 | 'override_base_tasks': { 25 | 'ModelTask': 'celery_rpc.tests.tasks.CustomModelTask' 26 | }, 27 | 'wrap_remote_errors': True, 28 | 'task_serializer': 'x-rpc-json' 29 | } 30 | 31 | MIDDLEWARE_CLASSES = [] 32 | -------------------------------------------------------------------------------- /celery_rpc/tasks.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from django.db import router 4 | from django.db.models import Q 5 | import six 6 | 7 | from celery_rpc.utils import unproxy 8 | from . import config, utils 9 | from .app import rpc 10 | from .base import get_base_task_class, atomic_commit_on_success 11 | from .exceptions import RestFrameworkError 12 | 13 | 14 | _base_model_task = get_base_task_class('ModelTask') 15 | 16 | 17 | @rpc.task(name=utils.FILTER_TASK_NAME, bind=True, base=_base_model_task, 18 | shared=False) 19 | def filter(self, model, filters=None, offset=0, 20 | limit=config.filter_limit, fields=None, exclude=None, 21 | depth=0, manager='objects', database=None, serializer_cls=None, 22 | order_by=None, filters_Q=None, exclude_Q=None, *args, **kwargs): 23 | """ Filter Django models and return serialized queryset. 
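    Illustrative call (model path and lookups are hypothetical), mirroring
    the usage exercised in celery_rpc/tests/test_tasks.py:

        tasks.filter.delay('myapp.models:Book',
                           filters={'pk__in': [1, 2, 3]},
                           order_by='-id', limit=10).get()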
24 | 25 | :param model: full name of model class like 'app.models:Model' 26 | :param filters: supported lookups for filter like {'pk__in': [1,2,3]} 27 | :param offset: offset of first item in the queryset (by default 0) 28 | :param limit: max number of result list (by default 1000) 29 | :param fields: shrink serialized fields of result 30 | :param exclude: supported lookups for exclude like {'pk__in': [1,2,3]} 31 | :param order_by: order of result list (list, tuple or string), default = [] 32 | :param filters_Q: Django Q object for filter() 33 | :param exclude_Q: Django Q object for exclude() 34 | :return: list of serialized model data 35 | 36 | """ 37 | qs = self.default_queryset 38 | if filters or filters_Q: 39 | filters = filters if isinstance(filters, dict) else {} 40 | filters_Q = filters_Q if isinstance(filters_Q, Q) else Q() 41 | qs = qs.filter(filters_Q, **filters) 42 | if exclude or exclude_Q: 43 | exclude = exclude if isinstance(exclude, dict) else {} 44 | exclude_Q = exclude_Q if isinstance(exclude_Q, Q) else Q() 45 | qs = qs.exclude(exclude_Q, **exclude) 46 | if order_by: 47 | if isinstance(order_by, six.string_types): 48 | qs = qs.order_by(order_by) 49 | elif isinstance(order_by, (list, tuple)): 50 | qs = qs.order_by(*order_by) 51 | qs = qs[offset:offset+limit] 52 | return self.serializer_class(instance=qs, many=True).data 53 | 54 | 55 | _base_model_change_task = get_base_task_class('ModelChangeTask') 56 | 57 | 58 | @rpc.task(name=utils.UPDATE_TASK_NAME, bind=True, base=_base_model_change_task, 59 | shared=False) 60 | def update(self, model, data, fields=None, nocache=False, 61 | manager='objects', database=None, serializer_cls=None, *args, **kwargs): 62 | """ Update Django models by PK and return new values. 63 | 64 | :param model: full name of model class like 'app.models:ModelClass' 65 | :param data: values of one or several objects 66 | {'id': 1, 'title': 'hello'} or [{'id': 1, 'title': 'hello'}] 67 | :return: serialized model data or list of one or errors 68 | 69 | """ 70 | instance, many = self.get_instance(data) 71 | return self.perform_changes(instance=instance, data=data, many=many, 72 | allow_add_remove=False, force_update=True) 73 | 74 | 75 | @rpc.task(name=utils.GETSET_TASK_NAME, bind=True, base=_base_model_change_task, 76 | shared=False) 77 | def getset(self, model, data, fields=None, nocache=False, 78 | manager='objects', database=None, *args, **kwargs): 79 | """ Update Django models by PK and return old values as one atomic action. 
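    Illustrative call (model path and fields are hypothetical); the returned
    data holds the values as they were *before* the update, as exercised in
    celery_rpc/tests/test_tasks.py:

        old = tasks.getset.delay('myapp.models:Book',
                                 {'id': 1, 'char': 'new value'}).get()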
80 | 81 | :param model: full name of model class like 'app.models:ModelClass' 82 | :param data: values of one or several objects 83 | {'id': 1, 'title': 'hello'} or [{'id': 1, 'title': 'hello'}] 84 | :return: serialized model data or list of one or errors 85 | 86 | """ 87 | from celery_rpc.base import DRF3 88 | db_for_write = router.db_for_write(self.model) 89 | with atomic_commit_on_success(using=db_for_write): 90 | instance, many = self.get_instance(data, using=db_for_write) 91 | if DRF3: 92 | kwargs = {} 93 | else: 94 | kwargs = {'allow_add_remove': False} 95 | s = self.serializer_class(instance=instance, data=data, 96 | many=many, partial=True, **kwargs) 97 | if not DRF3: 98 | # In DRF 2.3-2.4 serializer.is_valid() changes serializer.data 99 | old_values = s.data 100 | elif s.is_valid(): 101 | # In DRF 3.0+ you must call is_valid() before accessing data 102 | old_values = s.data 103 | # In DRF 3.3+ you cant call save() after accessing data, so we need 104 | # to spoof check in save() 105 | del s._data 106 | else: 107 | errors = unproxy(s.errors) 108 | raise RestFrameworkError('Serializer errors happened', errors) 109 | 110 | if s.is_valid(): 111 | s.save(force_update=True) 112 | if many: 113 | return old_values 114 | else: 115 | return old_values 116 | else: 117 | errors = unproxy(s.errors) 118 | raise RestFrameworkError('Serializer errors happened', errors) 119 | 120 | 121 | @rpc.task(name=utils.UPDATE_OR_CREATE_TASK_NAME, bind=True, 122 | base=_base_model_change_task, shared=False) 123 | def update_or_create(self, model, data, fields=None, nocache=False, 124 | manager='objects', database=None, serializer_cls=None, *args, **kwargs): 125 | """ Update Django models by PK or create new and return new values. 126 | 127 | :param model: full name of model class like 'app.models:ModelClass' 128 | :param data: values of one or several objects 129 | {'id': 1, 'title': 'hello'} or [{'id': 1, 'title': 'hello'}] 130 | :return: serialized model data or list of one or errors 131 | 132 | """ 133 | try: 134 | instance, many = self.get_instance(data) 135 | except self.model.DoesNotExist: 136 | instance, many = None, False 137 | return self.perform_changes(instance=instance, data=data, many=many, 138 | allow_add_remove=many) 139 | 140 | 141 | @rpc.task(name=utils.CREATE_TASK_NAME, bind=True, base=_base_model_change_task, 142 | shared=False) 143 | def create(self, model, data, fields=None, nocache=False, 144 | manager='objects', database=None, serializer_cls=None, *args, **kwargs): 145 | """ Update Django models by PK or create new and return new values. 146 | 147 | :param model: full name of model class like 'app.models:ModelClass' 148 | :param data: values of one or several objects 149 | {'id': 1, 'title': 'hello'} or [{'id': 1, 'title': 'hello'}] 150 | :return: serialized model data or list of one or errors 151 | 152 | """ 153 | instance, many = (None, False if isinstance(data, dict) else True) 154 | return self.perform_changes(instance=instance, data=data, many=many, 155 | allow_add_remove=many, force_insert=True, 156 | partial=False) 157 | 158 | 159 | @rpc.task(name=utils.DELETE_TASK_NAME, bind=True, base=_base_model_change_task, 160 | shared=False) 161 | def delete(self, model, data, fields=None, nocache=False, 162 | manager='objects', database=None, serializer_cls=None, *args, **kwargs): 163 | """ Delete Django models by PK. 
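    Illustrative call (model path is hypothetical); a single dict returns
    None, a list of dicts returns [], as exercised in
    celery_rpc/tests/test_tasks.py:

        tasks.delete.delay('myapp.models:Book', {'id': 1}).get()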
164 | 165 | :param model: full name of model class like 'app.models:ModelClass' 166 | :param data: values of one or several objects 167 | {'id': 1, 'title': 'hello'} or [{'id': 1, 'title': 'hello'}] 168 | :return: None or [] if many 169 | 170 | """ 171 | instance, many = self.get_instance(data) 172 | if not many: 173 | try: 174 | instance.delete() 175 | except Exception as e: 176 | raise RestFrameworkError('Could not delete instance', e) 177 | else: 178 | return self.perform_changes(instance=instance, data=[], many=many, 179 | allow_add_remove=many) 180 | 181 | 182 | _base_function_task = get_base_task_class('FunctionTask') 183 | 184 | 185 | @rpc.task(name=utils.CALL_TASK_NAME, bind=True, base=_base_function_task, 186 | shared=False) 187 | def call(self, function, args, kwargs): 188 | """ Call function with args & kwargs 189 | 190 | :param function: full function name like 'package.module:function' 191 | :param args: positional parameters 192 | :param kwargs: named parameters 193 | {'id': 1, 'title': 'hello'} or [{'id': 1, 'title': 'hello'}] 194 | :return: result of function 195 | 196 | """ 197 | args = args or [] 198 | kwargs = kwargs or {} 199 | if not isinstance(args, list): 200 | message = "Invalid type of 'args', need: 'list', got: '{}'".format( 201 | type(args)) 202 | raise TypeError(message) 203 | if not isinstance(kwargs, dict): 204 | message = "Invalid type of 'kwargs', need: 'dict', got: '{}'".format( 205 | type(args)) 206 | raise TypeError(message) 207 | return self.function(*args, **kwargs) 208 | 209 | 210 | _base_pipe_task = get_base_task_class('PipeTask') 211 | 212 | 213 | @rpc.task(name=utils.PIPE_TASK_NAME, bind=True, base=_base_pipe_task, 214 | shared=False) 215 | def pipe(self, pipeline): 216 | """ Handle pipeline and return results 217 | :param pipeline: List of pipelined requests. 218 | :return: list of results of each request. 219 | """ 220 | result = [] 221 | r = None 222 | headers = self.headers 223 | headers["piped"] = True 224 | with atomic_commit_on_success(): 225 | for t in pipeline: 226 | task = self.app.tasks[t['name']] 227 | args = t['args'] 228 | if t['options'].get('transformer'): 229 | if not hasattr(args, 'append'): 230 | args = list(args) 231 | if t['name'] == utils.RESULT_TASK_NAME: 232 | args.append(result) 233 | else: 234 | args.append(r) 235 | res = task.apply(args=args, kwargs=t['kwargs'], headers=headers) 236 | r = res.get(disable_sync_subtasks=False) 237 | result.append(r) 238 | 239 | return result 240 | 241 | 242 | @rpc.task(name=utils.TRANSLATE_TASK_NAME, bind=True, shared=False) 243 | def translate(self, map, data, defaults=None): 244 | """ Translate keys by map. 245 | 246 | :param map: list or dict, translation map 247 | :param data: values for translate 248 | :param defaults: defaults value 249 | :return: list or dict translated values 250 | """ 251 | defaults = defaults or {} 252 | 253 | def _translate_keys_and_set_defaults(data): 254 | result = defaults.copy() 255 | 256 | for result_key, initial_key in map.items(): 257 | if initial_key in data: 258 | result[result_key] = data[initial_key] 259 | 260 | return result 261 | 262 | if isinstance(data, (list, tuple)): 263 | return [_translate_keys_and_set_defaults(el) for el in data] 264 | else: 265 | return _translate_keys_and_set_defaults(data) 266 | 267 | 268 | @rpc.task(name=utils.RESULT_TASK_NAME, bind=True, shared=False) 269 | def result(self, index, data): 270 | """ Return result from pipe results lists by index. 271 | Need to explicitly specify which value to transmit a subsequent task. 
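    Illustrative pipe usage (model paths and field names are hypothetical;
    `client` is a Client instance), following
    celery_rpc/tests/test_pipeline.py: result(0) re-injects the output of the
    first pipe step into the next transformer:

        p = client.pipe().create('myapp.models:Author', data={'name': 'A'})
        p = p.result(0)
        p = p.translate({'author': 'id'})
        p = p.create('myapp.models:Book')
        r = p.run()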
272 | 
273 |     :param index: int index in list of results
274 |     :param data: list of values
275 |     :return: value from list
276 |     """
277 |     return data[index]
278 | 
--------------------------------------------------------------------------------
/celery_rpc/tests/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | os.environ.setdefault('CELERY_RPC_TEST_MODE', "1")
--------------------------------------------------------------------------------
/celery_rpc/tests/factories.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | import factory
3 | from factory.django import DjangoModelFactory
4 | 
5 | from celery_rpc.tests import models
6 | 
7 | 
8 | def create_m2m(field_name, field_factory=None):
9 |     """ Helper function for creating Many-To-Many fields
10 |     for factory_boy PostGeneration declarations.
11 | 
12 |     Based on the factory_boy documentation:
13 |     https://factoryboy.readthedocs.io/en/latest/recipes.html#simple-many-to-many-relationship
14 | 
15 |     If field_factory is given, the field is populated with a single object
16 |     created by that factory.
17 | 
18 |     :param field_name: Name of the field to which objects will be added.
19 |     :param field_factory: Factory for creating a single object.
20 |     """
21 | 
22 |     # noinspection PyUnusedLocal
23 |     def basic_m2m(obj, create, extracted, **kwargs):
24 |         if not create:
25 |             return
26 | 
27 |         if field_factory is not None:
28 |             getattr(obj, field_name).add(field_factory.create())
29 |         elif extracted:
30 |             for item in extracted:
31 |                 getattr(obj, field_name).add(item)
32 | 
33 |     return basic_m2m
34 | 
35 | 
36 | class SimpleModelFactory(DjangoModelFactory):
37 |     class Meta:
38 |         model = models.SimpleModel
39 | 
40 |     char = factory.Sequence(lambda n: 'char{}'.format(n))
41 | 
42 | 
43 | class NonAutoPrimaryKeyModelFactory(DjangoModelFactory):
44 |     class Meta:
45 |         model = models.NonAutoPrimaryKeyModel
46 | 
47 | 
48 | class PartialUpdateModelFactory(DjangoModelFactory):
49 |     class Meta:
50 |         model = models.PartialUpdateModel
51 | 
52 |     f1 = factory.Sequence(lambda n: n)
53 |     f2 = factory.Sequence(lambda n: n)
54 | 
55 | 
56 | class FkSimpleModelFactory(DjangoModelFactory):
57 |     class Meta:
58 |         model = models.FkSimpleModel
59 | 
60 |     fk = factory.SubFactory(SimpleModelFactory)
61 | 
62 | 
63 | class ManyToManyModelFactory(DjangoModelFactory):
64 |     class Meta:
65 |         model = models.ManyToManyModel
66 | 
67 |     m2m = factory.PostGeneration(create_m2m('m2m'))
68 | 
--------------------------------------------------------------------------------
/celery_rpc/tests/models.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | 
3 | from django.db import models
4 | 
5 | 
6 | class SimpleModel(models.Model):
7 |     char = models.CharField(max_length=64)
8 |     datetime = models.DateTimeField(default=datetime.now)
9 | 
10 | 
11 | class NonAutoPrimaryKeyModel(models.Model):
12 |     id = models.IntegerField(primary_key=True)
13 | 
14 | 
15 | class PartialUpdateModel(models.Model):
16 |     """ For partial update checks
17 |     """
18 |     f1 = models.IntegerField()
19 |     f2 = models.IntegerField()
20 | 
21 | 
22 | class FkSimpleModel(models.Model):
23 |     fk = models.ForeignKey(SimpleModel, on_delete=models.CASCADE)
24 |     char = models.CharField(max_length=64, blank=True, null=True)
25 | 
26 | 
27 | class ManyToManyModel(models.Model):
28 |     """ For m2m add/delete tests
29 |     """
30 |     m2m = models.ManyToManyField(SimpleModel)
31
| -------------------------------------------------------------------------------- /celery_rpc/tests/tasks.py: -------------------------------------------------------------------------------- 1 | from celery_rpc.base import ModelTask 2 | 3 | 4 | class CustomModelTask(ModelTask): 5 | """ For testing override base task class """ 6 | -------------------------------------------------------------------------------- /celery_rpc/tests/test_client.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import random 4 | import socket 5 | from datetime import datetime 6 | import mock 7 | 8 | from django.test import TestCase 9 | from rest_framework import serializers 10 | 11 | from celery_rpc.base import DRF3 12 | from .. import config, utils 13 | from ..client import Client 14 | from .utils import SimpleModelTestMixin 15 | 16 | 17 | class HighPriorityRequestTests(TestCase): 18 | """ High priority request tests 19 | """ 20 | @classmethod 21 | def setUpClass(cls): 22 | """ Creates rpc-client object 23 | """ 24 | super(HighPriorityRequestTests, cls).setUpClass() 25 | cls.rpc_client = Client() 26 | cls.task_name = utils.FILTER_TASK_NAME 27 | 28 | def testPrepareNormalPriorityTask(self): 29 | """ Method `prepare_task` cook normal priority request correctly 30 | """ 31 | signature = self.rpc_client.prepare_task(self.task_name, None, None) 32 | # Check, that default routing key is used 33 | self.assertNotIn('routing_key', signature.options) 34 | 35 | def testPrepareHighPriorityTask(self): 36 | """ Method `prepare_task` cook high priority request correctly 37 | """ 38 | signature = self.rpc_client.prepare_task(self.task_name, None, None, 39 | high_priority=True) 40 | self.assertEquals(config.task_default_routing_key + '.high_priority', 41 | signature.options['routing_key']) 42 | 43 | def _assertProxyMethodSupportHighPriority(self, method_name, *args, 44 | **kwargs): 45 | method = getattr(self.rpc_client, method_name) 46 | args = ['fake_model_or_function_name'] + list(args) 47 | # TODO: replace with nowait=True 48 | kwargs.update(high_priority=True, **{'async': True}) 49 | with mock.patch.object(Client, 'send_request') as _send_request: 50 | method(*args, **kwargs) 51 | # Get first parameter of args - Celery subtask signature 52 | signature = _send_request.call_args[0][0] 53 | msg = 'RPC-client method `{}` does not support high'.format(method_name) 54 | msg += ' priority requests' 55 | self.assertEquals(config.task_default_routing_key + '.high_priority', 56 | signature.options.get('routing_key'), msg) 57 | 58 | def testHighPriorityFilter(self): 59 | """ Method `filter` support high priority requests 60 | """ 61 | self._assertProxyMethodSupportHighPriority('filter') 62 | 63 | def testHighPriorityUpdate(self): 64 | """ Method `update` support high priority requests 65 | """ 66 | self._assertProxyMethodSupportHighPriority('update', {}) 67 | 68 | def testHighPriorityGetset(self): 69 | """ Method `getset` support high priority requests 70 | """ 71 | self._assertProxyMethodSupportHighPriority('getset', {}) 72 | 73 | def testHighPriorityUpdateOrCreate(self): 74 | """ Method `update_or_create` support high priority requests 75 | """ 76 | self._assertProxyMethodSupportHighPriority('update_or_create', {}) 77 | 78 | def testHighPriorityCreate(self): 79 | """ Method `create` support high priority requests 80 | """ 81 | self._assertProxyMethodSupportHighPriority('create', {}) 82 | 83 | def testHighPriorityDelete(self): 84 | """ Method 
`delete` support high priority requests 85 | """ 86 | self._assertProxyMethodSupportHighPriority('delete', {}) 87 | 88 | def testHighPriorityCall(self): 89 | """ Method `call` support high priority requests 90 | """ 91 | self._assertProxyMethodSupportHighPriority('call') 92 | 93 | 94 | class AlterIdentityTests(SimpleModelTestMixin, TestCase): 95 | """ Access to models with alter identity field (not only PK field) 96 | """ 97 | @classmethod 98 | def setUpClass(cls): 99 | """ Creates rpc-client object 100 | """ 101 | super(AlterIdentityTests, cls).setUpClass() 102 | cls.rpc_client = Client() 103 | 104 | def setUp(self): 105 | super(AlterIdentityTests, self).setUp() 106 | self.dt = datetime.max.replace(microsecond=998000) 107 | self.data = {'datetime': self.dt, 108 | 'char': self.models[0].char} 109 | self.kwargs = {'identity': 'char'} 110 | 111 | def testUpdate(self): 112 | """ Update with alter identity field looks good. 113 | """ 114 | r = self.rpc_client.update(self.MODEL_SYMBOL, self.data, self.kwargs) 115 | if DRF3: 116 | dt = serializers.DateTimeField().to_representation(self.dt) 117 | else: 118 | dt = self.dt 119 | self.assertEqual(dt, r['datetime']) 120 | self.assertEqual(self.dt, 121 | self.MODEL.objects.get(pk=self.models[0].pk).datetime) 122 | 123 | def testDelete(self): 124 | """ Update with alter identity field looks good. 125 | """ 126 | r = self.rpc_client.delete(self.MODEL_SYMBOL, self.data, self.kwargs) 127 | self.assertIsNone(r) 128 | self.assertFalse(self.MODEL.objects.filter(pk=self.models[0].pk).exists()) 129 | 130 | 131 | class SetRefererTests(SimpleModelTestMixin, TestCase): 132 | """ Client set referer header when calling tasks 133 | """ 134 | 135 | @classmethod 136 | def setUpClass(cls): 137 | """ Creates rpc-client object 138 | """ 139 | super(SetRefererTests, cls).setUpClass() 140 | cls.rpc_client = Client() 141 | cls.task_name = utils.FILTER_TASK_NAME 142 | 143 | def testSetRefererHeader(self): 144 | """ Method `prepare_task` set referer header 145 | """ 146 | signature = self.rpc_client.prepare_task(self.task_name, None, None) 147 | self.assertEqual( 148 | signature.options["headers"], 149 | {"referer": "@".join([config.rpc_client_name, socket.gethostname()])}) 150 | 151 | 152 | class TaskExpireTests(TestCase): 153 | """ Tests expiry time for tasks 154 | """ 155 | @classmethod 156 | def setUpClass(cls): 157 | super(TaskExpireTests, cls).setUpClass() 158 | cls.rpc_client = Client() 159 | cls.test_expires = 123 160 | cls.method_names = ['call', 'create', 'delete', 'update', 'filter', 161 | 'getset', 'update_or_create'] 162 | 163 | def _assertExpires(self, method_name, expected_expires, **kwargs): 164 | method = getattr(self.rpc_client, method_name) 165 | args = ['fake_model_or_function_name', {}] 166 | mock_name = 'celery_rpc.tasks.{}.apply_async'.format(method_name) 167 | kwargs.update(nowait=False) 168 | with mock.patch(mock_name) as _apply_async: 169 | method(*args, **kwargs) 170 | 171 | expires = _apply_async.call_args[1].get('expires', None) 172 | self.assertIsNotNone(expires) 173 | self.assertEqual(expected_expires, expires) 174 | 175 | def testExpiresDefault(self): 176 | """ Client uses default value for task expiration if timeout is None 177 | """ 178 | method_name = random.choice(self.method_names) 179 | 180 | self._assertExpires(method_name, config.get_result_timeout) 181 | 182 | def testExpiresFromTimeout(self): 183 | """ Client uses timeout value for task expiration 184 | """ 185 | method_name = random.choice(self.method_names) 186 | 187 | 
self._assertExpires(method_name, self.test_expires, 188 | timeout=self.test_expires) 189 | -------------------------------------------------------------------------------- /celery_rpc/tests/test_codecs.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import absolute_import 3 | 4 | import jsonpickle 5 | 6 | from django.test import TestCase 7 | from django.db.models import Q 8 | from kombu import serialization 9 | 10 | 11 | class RpcJsonCodecTests(TestCase): 12 | 13 | def testContentType(self): 14 | """ Encode with correct content-type. 15 | """ 16 | serialized = serialization.dumps(None, 'x-rpc-json') 17 | self.assertEqual('application/json+celery-rpc:v1', serialized[0]) 18 | 19 | def testSupportQ(self): 20 | """ Encoder/Decoder support Django Q-object 21 | """ 22 | source = dict(q=Q(a=1) | Q(b=2) & Q(c=3)) 23 | content_type, encoding, result = serialization.dumps(source, 'x-rpc-json') 24 | restored = serialization.loads(result, content_type, encoding) 25 | 26 | expected = jsonpickle.encode(source) 27 | result = jsonpickle.encode(restored) 28 | self.assertEqual(expected, result) 29 | 30 | def testAcceptJsonCeleryRpc(self): 31 | """ Accepted content type are correct. 32 | """ 33 | from ..app import rpc 34 | accept = serialization.prepare_accept_content( 35 | rpc.conf.CELERY_ACCEPT_CONTENT) 36 | 37 | source = ('a', 1, None) 38 | for codec in ('json', 'x-json', 'x-rpc-json'): 39 | content_type, encoding, result = serialization.dumps(source, codec) 40 | restored = serialization.loads(result, content_type, encoding, 41 | accept=accept) 42 | self.assertEqual(list(source), restored) 43 | -------------------------------------------------------------------------------- /celery_rpc/tests/test_errors.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # $Id: $ 4 | import json 5 | 6 | from celery.utils.serialization import UnpickleableExceptionWrapper 7 | from django.core.exceptions import PermissionDenied 8 | from django.test import TestCase 9 | import mock 10 | 11 | from celery_rpc import tasks 12 | from celery_rpc.client import Client 13 | from celery_rpc import exceptions 14 | from celery_rpc.exceptions import RestFrameworkError 15 | from celery_rpc.tests.utils import SimpleModelTestMixin 16 | 17 | 18 | class RemoteErrorsTestMixin(SimpleModelTestMixin): 19 | def setUp(self): 20 | super(RemoteErrorsTestMixin, self).setUp() 21 | self.serializer = tasks.rpc.conf['result_serializer'] 22 | self._wrap_errors = tasks.rpc.conf['wrap_remote_errors'] 23 | tasks.rpc.conf['wrap_remote_errors'] = True 24 | 25 | def tearDown(self): 26 | super(RemoteErrorsTestMixin, self).tearDown() 27 | tasks.rpc.conf["wrap_remote_errors"] = self._wrap_errors 28 | 29 | def testCallTask(self): 30 | self.assertErrorTunnelException( 31 | 'call', 32 | 'celery_rpc.base.FunctionTask.function', 33 | args=('celery_rpc.tests.utils.fail', (), {}), 34 | ) 35 | 36 | def testFilterTask(self): 37 | self.assertErrorTunnelException( 38 | 'filter', 39 | 'celery_rpc.base.ModelTask._import_model', 40 | args=(self.MODEL_SYMBOL,), 41 | ) 42 | 43 | def testUpdateTask(self): 44 | self.assertErrorTunnelException( 45 | 'update', 46 | 'celery_rpc.base.ModelChangeTask._import_model', 47 | args=(self.MODEL_SYMBOL, {'char': 'abc'}), 48 | ) 49 | 50 | def testGetSetTask(self): 51 | self.assertErrorTunnelException( 52 | 'getset', 53 | 'celery_rpc.base.ModelChangeTask._import_model', 54 | args=(self.MODEL_SYMBOL, {'char': 
'abc'}), 55 | ) 56 | 57 | def testUpdateOrCreateTask(self): 58 | self.assertErrorTunnelException( 59 | 'update_or_create', 60 | 'celery_rpc.base.ModelChangeTask._import_model', 61 | args=(self.MODEL_SYMBOL, {'char': 'abc'}), 62 | ) 63 | 64 | def testCreateTask(self): 65 | self.assertErrorTunnelException( 66 | 'create', 67 | 'celery_rpc.base.ModelTask._import_model', 68 | args=(self.MODEL_SYMBOL, {'char': 'abc'}), 69 | ) 70 | 71 | def testDeleteTask(self): 72 | self.assertErrorTunnelException( 73 | 'delete', 74 | 'celery_rpc.base.ModelChangeTask._import_model', 75 | args=(self.MODEL_SYMBOL, {'char': 'abc'}), 76 | ) 77 | 78 | def assertErrorTunnelException(self, task, patch, args=(), kwargs=None): 79 | raise NotImplementedError() 80 | 81 | 82 | class ErrorTunnelServerTestCase(RemoteErrorsTestMixin, TestCase): 83 | def gettestee(self, name): 84 | return getattr(tasks, name) 85 | 86 | def assertErrorTunnelException(self, name, patch, args=(), kwargs=None): 87 | kwargs = kwargs or {} 88 | error = ValueError(100500) 89 | 90 | task = self.gettestee(name) 91 | 92 | with mock.patch(patch, side_effect=error): 93 | r = task.apply(args=args, kwargs=kwargs) 94 | remote_exception_stub = r.get(propagate=False) 95 | expected = exceptions.RemoteException(error, serializer=self.serializer) 96 | self.assertEqual(remote_exception_stub.__class__.__name__, 97 | exceptions.RemoteException.__name__) 98 | self.assertTupleEqual(remote_exception_stub.args, expected.args) 99 | 100 | def testPackUnpackException(self): 101 | exc = exceptions.RemoteException( 102 | ValueError(100500), 103 | serializer=self.serializer) 104 | 105 | inner = exc.unpack_exception(self.serializer) 106 | self.assertIsInstance(inner, 107 | exceptions.remote_exception_registry.ValueError) 108 | self.assertEqual(inner.args, (100500,)) 109 | 110 | def testTunnelDisabled(self): 111 | error = ValueError(100500) 112 | tasks.rpc.conf['wrap_remote_errors'] = False 113 | task = self.gettestee('call') 114 | patch = 'celery_rpc.base.FunctionTask.function' 115 | args = ('celery_rpc.tests.utils.fail', (), {}), 116 | with mock.patch(patch, side_effect=error): 117 | r = task.apply(*args) 118 | remote_exception_stub = r.get(propagate=False) 119 | self.assertIsInstance(remote_exception_stub, ValueError) 120 | 121 | 122 | class ErrorTunnelClientTestCase(RemoteErrorsTestMixin, TestCase): 123 | @classmethod 124 | def setUpClass(cls): 125 | """ Creates rpc-client object 126 | """ 127 | super(ErrorTunnelClientTestCase, cls).setUpClass() 128 | cls.rpc_client = Client() 129 | 130 | def setUp(self): 131 | super(ErrorTunnelClientTestCase, self).setUp() 132 | self._wrap_errors = self.rpc_client._app.conf['wrap_remote_errors'] 133 | self.rpc_client._app.conf['wrap_remote_errors'] = True 134 | self.serializer = self.rpc_client._app.conf['result_serializer'] 135 | 136 | def tearDown(self): 137 | super(ErrorTunnelClientTestCase, self).tearDown() 138 | self.rpc_client._app.conf['wrap_remote_errors'] = self._wrap_errors 139 | 140 | def gettestee(self, name): 141 | return getattr(self.rpc_client, name) 142 | 143 | def assertErrorTunnelException(self, name, patch, args=(), kwargs=None): 144 | kwargs = kwargs or {} 145 | error = RestFrameworkError(100500) 146 | 147 | method = self.gettestee(name) 148 | with mock.patch(patch, side_effect=error): 149 | with self.assertRaises(error.__class__) as r: 150 | method(*args, **kwargs) 151 | 152 | self.assertTupleEqual(r.exception.args, error.args) 153 | 154 | def testUnpackingFromTunnelDisabled(self): 155 | """ Error wrapping disabled on 
server, enabled on client.""" 156 | error = ValueError(100500) 157 | tasks.rpc.conf['wrap_remote_errors'] = False 158 | method = self.gettestee('call') 159 | patch = 'celery_rpc.base.FunctionTask.function' 160 | args = ('celery_rpc.tests.utils.fail', (), {}) 161 | with mock.patch(patch, side_effect=error): 162 | with mock.patch('celery_rpc.utils.unpack_exception', 163 | return_value=None) as unpack_mock: 164 | with self.assertRaises(self.rpc_client.ResponseError) as ctx: 165 | method(*args) 166 | response_error = ctx.exception 167 | self.assertIsInstance(response_error.args[1], ValueError) 168 | # checking that non-wrapped exception is passed to unpacking helper 169 | # and that unpack flag is True. 170 | unpack_mock.assert_called_with(error, True, serializer=self.serializer) 171 | 172 | def testNotUnpackingFromTunnelEnabled(self): 173 | """ Error wrapping disabled on client, enabled on server.""" 174 | error = ValueError(100500) 175 | serializer = tasks.rpc.conf['result_serializer'] 176 | wrapped = exceptions.RemoteException(error, serializer) 177 | method = self.gettestee('call') 178 | patch = 'celery_rpc.base.FunctionTask.function' 179 | args = ('celery_rpc.tests.utils.fail', (), {}) 180 | self.rpc_client._app.conf['wrap_remote_errors'] = False 181 | with mock.patch(patch, side_effect=error): 182 | with mock.patch('celery_rpc.utils.unpack_exception', 183 | return_value=None) as unpack_mock: 184 | with self.assertRaises(self.rpc_client.ResponseError) as ctx: 185 | method(*args) 186 | response = ctx.exception 187 | remote_error = response.args[1] 188 | # checking that wrapped exception is passed to unpacking helper 189 | # and that unpack flag is False. 190 | 191 | unpack_mock.assert_called_with(remote_error, False, 192 | serializer=self.serializer) 193 | remote_error_cls = remote_error.__class__ 194 | self.assertEqual(remote_error_cls.__name__, "RemoteException") 195 | self.assertEqual(remote_error_cls.__module__, "celery_rpc.exceptions") 196 | args = remote_error.args 197 | self.assertTupleEqual(args, wrapped.args) 198 | 199 | 200 | class ErrorRegistryTestCase(TestCase): 201 | @classmethod 202 | def setUpClass(cls): 203 | """ Creates rpc-client object 204 | """ 205 | super(ErrorRegistryTestCase, cls).setUpClass() 206 | cls.rpc_client = Client() 207 | 208 | def setUp(self): 209 | super(ErrorRegistryTestCase, self).setUp() 210 | self._wrap_errors = self.rpc_client._app.conf['wrap_remote_errors'] 211 | self.rpc_client._app.conf['wrap_remote_errors'] = True 212 | self.registry = exceptions.remote_exception_registry 213 | self.registry.flush() 214 | self.module = ValueError.__module__ 215 | self.name = "ValueError" 216 | self.args = (100500,) 217 | self.serializer = 'json' 218 | 219 | @property 220 | def data(self): 221 | return json.dumps([self.module, self.name, self.args]) 222 | 223 | def tearDown(self): 224 | super(ErrorRegistryTestCase, self).tearDown() 225 | self.rpc_client._app.conf['wrap_remote_errors'] = self._wrap_errors 226 | 227 | def testUnpackNativeException(self): 228 | exc = self.registry.unpack_exception(self.data, self.serializer) 229 | self.assertIsInstance(exc, ValueError) 230 | self.assertIsInstance(exc, self.registry.RemoteError) 231 | self.assertTupleEqual(exc.args, self.args) 232 | 233 | def testUnpackExistingException(self): 234 | self.module = "django.core.exceptions" 235 | self.name = "PermissionDenied" 236 | exc = self.registry.unpack_exception(self.data, self.serializer) 237 | self.assertIsInstance(exc, PermissionDenied) 238 | self.assertIsInstance(exc, 
self.registry.RemoteError) 239 | self.assertTupleEqual(exc.args, self.args) 240 | 241 | def testUnpackUnknownException(self): 242 | self.module = "nonexistent.module" 243 | self.name = "NonexistentError" 244 | exc = self.registry.unpack_exception(self.data, self.serializer) 245 | self.assertIsInstance(exc, self.registry.RemoteError) 246 | self.assertTupleEqual(exc.args, self.args) 247 | 248 | def assertRemoteErrorInClient(self, error=None): 249 | error = error or ValueError(100500) 250 | with mock.patch('celery_rpc.base.FunctionTask.function', 251 | side_effect=error): 252 | with self.assertRaises(self.registry.RemoteError) as ctx: 253 | self.rpc_client.call('celery_rpc.tests.utils.fail') 254 | self.assertIsInstance(ctx.exception, error.__class__) 255 | 256 | def testClientRemoteErrorBaseClasses(self): 257 | self.assertRemoteErrorInClient() # Native error 258 | self.assertRemoteErrorInClient(error=PermissionDenied("WTF")) 259 | 260 | def testExceptNativeRemoteError(self): 261 | with self.assertRaises(self.rpc_client.errors.ValueError): 262 | raise self.registry.unpack_exception(self.data, self.serializer) 263 | 264 | def testExceptExistingRemoteError(self): 265 | self.module = "django.core.exceptions" 266 | self.name = "PermissionDenied" 267 | with self.assertRaises(self.rpc_client.errors.PermissionDenied): 268 | raise self.registry.unpack_exception(self.data, self.serializer) 269 | 270 | def testExceptUnknownRemoteError(self): 271 | self.module = "nonexistent.module" 272 | self.name = "NonexistentError" 273 | with self.assertRaises(self.rpc_client.errors.NonexistentError): 274 | raise self.registry.unpack_exception(self.data, self.serializer) 275 | 276 | def testRemoteErrorHierarchy(self): 277 | parent = self.rpc_client.errors.IndexError 278 | error = self.rpc_client.errors.subclass(parent, "ValueError") 279 | exc = self.registry.unpack_exception(self.data, self.serializer) 280 | 281 | self.assertIsInstance(exc, self.rpc_client.errors.RemoteError) 282 | self.assertIsInstance(exc, self.rpc_client.errors.IndexError) 283 | self.assertIsInstance(exc, self.rpc_client.errors.ValueError) 284 | -------------------------------------------------------------------------------- /celery_rpc/tests/test_m2m.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import absolute_import 3 | 4 | from django import VERSION as django_version 5 | 6 | from celery_rpc.tests import factories 7 | from .. 
import tasks 8 | from .utils import get_model_dict 9 | from .test_tasks import BaseTaskTests 10 | 11 | 12 | class ManyToManyUpdateTests(BaseTaskTests): 13 | """ Test for add/remove m2m relations 14 | """ 15 | M2M_THROUGH_SYMBOL = 'celery_rpc.tests.models:ManyToManyModel.m2m.through' 16 | 17 | def setUp(self): 18 | super(ManyToManyUpdateTests, self).setUp() 19 | self.m2m_model = factories.ManyToManyModelFactory() 20 | 21 | def testAdd(self): 22 | """ Add m2m relations working fine 23 | """ 24 | if django_version[0] > 1: 25 | self.m2m_model.m2m.set([self.models[0]]) 26 | else: 27 | self.m2m_model.m2m = [self.models[0]] 28 | 29 | # # pre-conditions 30 | self.assertEquals(1, self.m2m_model.m2m.count()) 31 | 32 | data = {'manytomanymodel': self.m2m_model.pk, 33 | 'simplemodel': self.models[1].pk} 34 | r = tasks.create.delay(self.M2M_THROUGH_SYMBOL, data) 35 | 36 | self.assertTrue(r.successful()) 37 | self.assertEquals(2, self.m2m_model.m2m.count()) 38 | 39 | expected = self.m2m_model.m2m.get(id=self.models[1].pk) 40 | expected = get_model_dict(expected) 41 | self.assertEquals(expected, get_model_dict(self.models[1])) 42 | 43 | def testDelete(self): 44 | """ Remove m2m relations working fine 45 | """ 46 | if django_version[0] > 1: 47 | self.m2m_model.m2m.set(self.models[:2]) 48 | else: 49 | self.m2m_model.m2m = self.models[:2] 50 | 51 | # pre-conditions 52 | self.assertEquals(2, self.m2m_model.m2m.count()) 53 | 54 | through = self.m2m_model.m2m.through.objects 55 | 56 | delete = through.get(simplemodel=self.models[0]) 57 | # FIXME actually is not a good idea drop relations by PK 58 | data = {'pk': delete.pk} 59 | r = tasks.delete.delay(self.M2M_THROUGH_SYMBOL, data) 60 | 61 | self.assertTrue(r.successful()) 62 | self.assertEquals(1, self.m2m_model.m2m.count()) 63 | 64 | expected = get_model_dict(self.models[1]) 65 | value = get_model_dict(self.m2m_model.m2m.all()[0]) 66 | self.assertEquals(expected, value) -------------------------------------------------------------------------------- /celery_rpc/tests/test_pipeline.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from __future__ import absolute_import 3 | import six 4 | from unittest import expectedFailure 5 | 6 | from django.test import TransactionTestCase 7 | 8 | from celery_rpc.exceptions import remote_exception_registry 9 | from celery_rpc.tests import factories 10 | from ..client import Pipe, Client 11 | from .utils import SimpleModelTestMixin, unpack_exception 12 | from .models import SimpleModel, FkSimpleModel 13 | 14 | 15 | class BasePipelineTests(SimpleModelTestMixin, TransactionTestCase): 16 | """ Abstract base class for pipe tests. 17 | """ 18 | 19 | def setUp(self): 20 | super(BasePipelineTests, self).setUp() 21 | self.client = Client() 22 | 23 | @property 24 | def pipe(self): 25 | return self.client.pipe() 26 | 27 | 28 | class PipelineTests(BasePipelineTests): 29 | """ Pipeline related tests. 30 | """ 31 | 32 | def testClientCanCreatePipe(self): 33 | """ Client able to start pipeline 34 | """ 35 | p = self.client.pipe() 36 | self.assertIsInstance(p, Pipe) 37 | 38 | def testPipeCanSendRequest(self): 39 | """ Pipe can send complex request to RPC server. 40 | """ 41 | r = self.client.pipe().run() 42 | self.assertEqual([], r) 43 | 44 | def testSeveralFilters(self): 45 | """ Several filters in the chain work well. 
46 | """ 47 | p = self.pipe.filter(self.MODEL_SYMBOL, 48 | kwargs=dict(filters={'pk': self.models[0].pk})) 49 | p = p.filter(self.MODEL_SYMBOL, 50 | kwargs=dict(filters={'pk': self.models[1].pk})) 51 | 52 | r = p.run() 53 | 54 | expected = [[self.get_model_dict(self.models[0])], 55 | [self.get_model_dict(self.models[1])]] 56 | self.assertEqual(expected, r) 57 | 58 | def testUpdate(self): 59 | """ Update works well in pipeline. 60 | """ 61 | p = self.pipe.update(self.MODEL_SYMBOL, 62 | {'pk': self.models[0].pk, 'char': 'hello'}) 63 | r = p.run() 64 | 65 | m = SimpleModel.objects.get(pk=self.models[0].pk) 66 | self.assertEqual('hello', m.char) 67 | expected = [self.get_model_dict(m)] 68 | self.assertEqual(expected, r) 69 | 70 | def testAtomicPipeline(self): 71 | """ Pipeline is atomic by default. 72 | """ 73 | p = self.pipe 74 | p = p.delete(self.MODEL_SYMBOL, self.get_model_dict(self.models[0])) 75 | p = p.delete('invalid model symbol raise exception', {}) 76 | with self.assertRaisesRegexp(Exception, "No module named"): 77 | with unpack_exception(): 78 | p.run() 79 | self.assertTrue(SimpleModel.objects.filter( 80 | pk=self.models[0].pk).exists()) 81 | 82 | def testAtomicPipelineRemoteError(self): 83 | """ Perform testAtomicPipeline with remote errors handling 84 | in another mode.""" 85 | old = self.client._app.conf['wrap_remote_errors'] 86 | self.client._app.conf['wrap_remote_errors'] = not old 87 | return self.testAtomicPipeline() 88 | 89 | def testWrapRemoteErrors(self): 90 | """ Errors wrap correctly 91 | """ 92 | self.client._app.conf['wrap_remote_errors'] = True 93 | 94 | p = self.pipe 95 | p = p.delete(self.MODEL_SYMBOL, self.get_model_dict(self.models[0])) 96 | p = p.delete('invalid model symbol raise exception', {}) 97 | with self.assertRaisesRegexp(remote_exception_registry.RemoteError, 98 | "No module named") as ctx: 99 | p.run(propagate=False) 100 | self.assertIsInstance(ctx.exception, ImportError) 101 | 102 | @expectedFailure 103 | def testPatchTransformer(self): 104 | """ TODO `patch` updates result of previous task. 105 | """ 106 | p = self.pipe.filter(self.MODEL_SYMBOL, 107 | kwargs=dict(filters={'pk': self.models[0].pk})) 108 | r = p.patch({'char': 'abc'}) 109 | 110 | expected = [[self.get_model_dict(self.models[0])], 111 | [self.get_model_dict(self.models[0])]] 112 | expected[1].update(char='abc') 113 | 114 | self.assertEqual(expected, r) 115 | 116 | 117 | class TransformTests(BasePipelineTests): 118 | """ Tests on different transformation. 119 | """ 120 | FK_MODEL_SYMBOL = 'celery_rpc.tests.models:FkSimpleModel' 121 | TRANSFORM_MAP = {'fk': 'id'} 122 | 123 | def setUp(self): 124 | super(TransformTests, self).setUp() 125 | 126 | self.model = factories.SimpleModelFactory() 127 | self.fk_model = factories.FkSimpleModelFactory(fk=self.model) 128 | 129 | def testDeleteTransformer(self): 130 | """ Delete transformation works well. 
131 | """ 132 | p = self.pipe.filter(self.MODEL_SYMBOL, 133 | kwargs=dict(filters={'pk': self.models[0].pk})) 134 | p = p.delete(self.MODEL_SYMBOL) 135 | r = p.run() 136 | 137 | expected = [[self.get_model_dict(self.models[0])], []] 138 | self.assertEqual(expected, r) 139 | self.assertRaises(SimpleModel.DoesNotExist, 140 | SimpleModel.objects.get, pk=self.models[0].pk) 141 | 142 | def testDeleteTransformerRemoteError(self): 143 | """ Perform testDeleteTransformer with remote errors handling 144 | in another mode.""" 145 | old = self.client._app.conf['wrap_remote_errors'] 146 | self.client._app.conf['wrap_remote_errors'] = not old 147 | return self.testDeleteTransformer() 148 | 149 | def testCreateTransformer(self): 150 | p = self.pipe.filter(self.MODEL_SYMBOL, 151 | kwargs=dict(filters={'pk': self.models[0].pk})) 152 | p = p.translate(self.TRANSFORM_MAP) 153 | p = p.create(self.FK_MODEL_SYMBOL) 154 | r = p.run() 155 | 156 | self.assertTrue(FkSimpleModel.objects.get(**r[2][0])) 157 | 158 | def testCreateTransformerDefaults(self): 159 | p = self.pipe.create(self.MODEL_SYMBOL, data={"char": "parent"}) 160 | p = p.translate(self.TRANSFORM_MAP, 161 | kwargs=dict(defaults={"char": "child"})) 162 | p = p.create(self.FK_MODEL_SYMBOL) 163 | r = p.run() 164 | 165 | model = FkSimpleModel.objects.get(**r[2]) 166 | self.assertEqual(model.fk_id, r[0]["id"]) 167 | self.assertEqual(model.char, "child") 168 | 169 | def testUpdateOrCreateCreateTransformer(self): 170 | """ Test creating with update_or_create 171 | """ 172 | p = self.pipe.filter(self.MODEL_SYMBOL, 173 | kwargs=dict(filters={'pk': self.models[0].pk})) 174 | p = p.translate(self.TRANSFORM_MAP) 175 | p = p.update_or_create(self.FK_MODEL_SYMBOL) 176 | r = p.run() 177 | 178 | self.assertTrue(FkSimpleModel.objects.get(**r[2][0])) 179 | 180 | def testUpdateOrCreateUpdateTransformer(self): 181 | self.assertNotEqual(self.fk_model.id, self.models[1].pk) 182 | 183 | p = self.pipe.filter(self.MODEL_SYMBOL, 184 | kwargs=dict(filters={'pk': self.models[1].pk})) 185 | p = p.translate(self.TRANSFORM_MAP, 186 | kwargs=dict(defaults={'id': self.fk_model.id})) 187 | p = p.update_or_create(self.FK_MODEL_SYMBOL) 188 | r = p.run() 189 | 190 | expect_obj = FkSimpleModel.objects.get(**r[2][0]) 191 | self.assertEquals(expect_obj.fk.id, self.models[1].pk) 192 | 193 | def testUpdateTransformer(self): 194 | p = self.pipe.filter(self.MODEL_SYMBOL, 195 | kwargs=dict(filters={'pk': self.models[0].pk})) 196 | p = p.translate(self.TRANSFORM_MAP, 197 | kwargs=dict(defaults={'id': self.fk_model.id})) 198 | 199 | p = p.update(self.FK_MODEL_SYMBOL) 200 | r = p.run() 201 | 202 | self.assertEqual(r[2][0]['fk'], self.models[0].pk) 203 | 204 | def testGetSetTransformer(self): 205 | p = self.pipe.filter(self.MODEL_SYMBOL, 206 | kwargs=dict(filters={'pk': self.models[3].pk})) 207 | p = p.translate(self.TRANSFORM_MAP, 208 | kwargs=dict(defaults={'id': self.fk_model.id})) 209 | 210 | p = p.getset(self.FK_MODEL_SYMBOL) 211 | r = p.run() 212 | 213 | expect_obj = FkSimpleModel.objects.get(fk=self.models[3].pk) 214 | self.assertEquals(expect_obj.fk.id, self.models[3].pk) 215 | # return previous state 216 | self.assertNotEqual(r[2][0]['fk'], self.models[3].pk) 217 | 218 | 219 | class ResultTests(TransformTests): 220 | 221 | def testResult(self): 222 | DEFAULTS_COUNT = 10 223 | defaults = [dict(char=i) for i in six.moves.range(DEFAULTS_COUNT)] 224 | p = self.pipe.create(self.MODEL_SYMBOL, data={'char': 123}) 225 | 226 | for el in defaults: 227 | p = p.result(0) 228 | p = 
p.translate(self.TRANSFORM_MAP, 229 | kwargs=dict(defaults=el)) 230 | p = p.create(self.FK_MODEL_SYMBOL) 231 | 232 | r = p.run() 233 | 234 | expect_fk_id = r[0]['id'] 235 | expect = FkSimpleModel.objects.filter( 236 | char__in=six.moves.range(DEFAULTS_COUNT), 237 | fk=expect_fk_id) 238 | self.assertEquals(expect.count(), DEFAULTS_COUNT) 239 | -------------------------------------------------------------------------------- /celery_rpc/tests/test_tasks.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from random import randint 3 | from uuid import uuid4 4 | 5 | from django.core.exceptions import ObjectDoesNotExist 6 | 7 | from celery_rpc.tests import factories 8 | from celery_rpc.tests.utils import (get_model_dict, SimpleModelTestMixin, 9 | get_model_dict_from_list, unpack_exception) 10 | from django.test import TestCase 11 | from django.db.models import Q 12 | from rest_framework import serializers 13 | from .. import tasks 14 | from ..exceptions import ModelTaskError, remote_exception_registry 15 | from ..tests.tasks import CustomModelTask 16 | from .models import SimpleModel, NonAutoPrimaryKeyModel, PartialUpdateModel 17 | 18 | 19 | class BaseTaskTests(SimpleModelTestMixin, TestCase): 20 | pass 21 | 22 | 23 | class FilterTaskTests(BaseTaskTests): 24 | """ Tests for selecting models located on RPC server. 25 | """ 26 | 27 | def testLimit(self): 28 | r = tasks.filter.delay(self.MODEL_SYMBOL) 29 | self.assertEquals(5, len(r.get())) 30 | 31 | r = tasks.filter.delay(self.MODEL_SYMBOL, limit=2) 32 | self.assertEquals(2, len(r.get())) 33 | 34 | def testOffset(self): 35 | r = tasks.filter.delay(self.MODEL_SYMBOL, offset=1) 36 | expected = get_model_dict(self.models[1]) 37 | self.assertEquals(expected, r.get()[0]) 38 | 39 | def testFilters(self): 40 | expected = get_model_dict(self.models[0]) 41 | r = tasks.filter.delay(self.MODEL_SYMBOL, 42 | filters={'pk': expected['id']}) 43 | self.assertEquals(expected, r.get()[0]) 44 | 45 | def testFiltersWithQ(self): 46 | expected = get_model_dict(self.models[0]) 47 | r = tasks.filter.delay(self.MODEL_SYMBOL, 48 | filters_Q=Q(pk=expected['id'])) 49 | self.assertEquals(expected, r.get()[0]) 50 | 51 | def testFiltersWithLookupsAndQ(self): 52 | filter_ids = [m.id for m in self.models[3:]] 53 | filter_Q = Q(pk__lte=self.models[3].pk) 54 | r = tasks.filter.delay(self.MODEL_SYMBOL, 55 | filters={'pk__in': filter_ids}, 56 | filters_Q=filter_Q) 57 | expected = get_model_dict(self.models[3]) 58 | self.assertEquals(len(r.get()), 1) 59 | self.assertEquals(expected, r.get()[0]) 60 | 61 | def testExclude(self): 62 | """ Exclude seems good. 63 | """ 64 | exclude_ids = [m.pk for m in self.models[1:]] 65 | r = tasks.filter.delay(self.MODEL_SYMBOL, 66 | exclude={'pk__in': exclude_ids}) 67 | expected = get_model_dict(self.models[0]) 68 | self.assertEquals(expected, r.get()[0]) 69 | 70 | def testExcludeWithQ(self): 71 | """ Exclude with Q-object works nice. 72 | """ 73 | r = tasks.filter.delay(self.MODEL_SYMBOL, 74 | exclude_q=Q(pk__gte=self.models[1].pk)) 75 | expected = get_model_dict(self.models[0]) 76 | self.assertEquals(expected, r.get()[0]) 77 | 78 | def testExcludeWithLookupsAndQ(self): 79 | """ Exclude all except first and last by mix of `exclude` and 80 | `exclude_Q` seems able. 
81 | """ 82 | exclude_char = [m.char for m in self.models[1:]] 83 | exclude_Q = Q(pk__lte=self.models[3].pk) 84 | r = tasks.filter.delay(self.MODEL_SYMBOL, 85 | exclude={'char__in': exclude_char}, 86 | exclude_Q=exclude_Q) 87 | 88 | result = r.get() 89 | self.assertEquals(len(result), 2) 90 | for i in 4, 0: 91 | expected = get_model_dict(self.models[i]) 92 | r = result.pop() 93 | self.assertEquals(expected, r) 94 | 95 | def testSerializerFields(self): 96 | expected = get_model_dict(self.models[0]) 97 | field = list(expected.keys())[0] 98 | r = tasks.filter.delay(self.MODEL_SYMBOL, 99 | filters={'pk': expected['id']}, 100 | fields=[field]) 101 | self.assertEquals({field: expected[field]}, r.get()[0]) 102 | 103 | def testOrdering(self): 104 | self.models[0].char = 'a' 105 | self.models[0].save() 106 | 107 | self.models[1].char = 'b' 108 | self.models[1].save() 109 | 110 | r = tasks.filter.delay(self.MODEL_SYMBOL, 111 | filters={'char__in': ['a', 'b']}, 112 | order_by=['char']) 113 | self.assertEquals(['a', 'b'], [item['char'] for item in r.get()]) 114 | 115 | def testReverseOrdering(self): 116 | self.models[0].char = 'a' 117 | self.models[0].save() 118 | 119 | self.models[1].char = 'b' 120 | self.models[1].save() 121 | 122 | r = tasks.filter.delay(self.MODEL_SYMBOL, 123 | filters={'char__in': ['a', 'b']}, 124 | order_by='-char') 125 | self.assertEquals(['b', 'a'], [item['char'] for item in r.get()]) 126 | 127 | 128 | class SimpleTaskSerializer(serializers.ModelSerializer): 129 | """ Test serializer 130 | """ 131 | class Meta: 132 | model = SimpleModel 133 | fields = ('id', ) 134 | 135 | 136 | class SingleObjectsDoesNotExistMixin(object): 137 | """ Checks behavior of tasks, which modify existing objects. 138 | """ 139 | 140 | def checkSingleObjectDoesNotExist(self, expected_exc=ObjectDoesNotExist): 141 | with self.assertRaisesRegexp(expected_exc, 142 | r'matching query does not exist.'): 143 | with unpack_exception(): 144 | self.task.delay(self.MODEL_SYMBOL, 145 | {'char': str(uuid4()), 146 | 'id': randint(100, 1000)}).get() 147 | 148 | def testSingleObjectDoesNotExist(self): 149 | """ Raise exception if cannot find object in single mode """ 150 | tasks.rpc.conf['wrap_remote_errors'] = False 151 | return self.checkSingleObjectDoesNotExist() 152 | 153 | def testSingleObjectDoesNotExistRemoteError(self): 154 | """ Perform testSingleObjectDoesNotExist with remote errors handling 155 | enabled.""" 156 | tasks.rpc.conf['wrap_remote_errors'] = True 157 | return self.checkSingleObjectDoesNotExist(remote_exception_registry.RemoteError) 158 | 159 | 160 | class UpdateTaskTests(SingleObjectsDoesNotExistMixin, BaseTaskTests): 161 | 162 | task = tasks.update 163 | 164 | def testUpdateOne(self): 165 | expected = get_model_dict(self.models[0]) 166 | expected.update(char=str(uuid4())) 167 | r = self.task.delay(self.MODEL_SYMBOL, expected) 168 | self.assertEquals(expected, r.get()) 169 | 170 | updated = get_model_dict(SimpleModel.objects.get(pk=expected['id'])) 171 | self.assertEquals(expected, updated) 172 | 173 | def testUpdateMulti(self): 174 | expected = [get_model_dict(e) for e in self.models[0:2]] 175 | for e in expected: 176 | e.update(char=str(uuid4())) 177 | r = self.task.delay(self.MODEL_SYMBOL, expected) 178 | result = r.get() 179 | self.assertEquals(2, len(result)) 180 | self.assertEquals(expected, result) 181 | 182 | updated = [get_model_dict(o) for o in SimpleModel.objects.all()[0:2]] 183 | self.assertEquals(expected, updated) 184 | 185 | def testUpdatePartial(self): 186 | char_val = 
str(uuid4()) 187 | expected = get_model_dict(self.models[0]) 188 | expected.update(char=char_val) 189 | r = self.task.delay(self.MODEL_SYMBOL, 190 | {'char': char_val, 'id': expected['id']}) 191 | self.assertDictEqual(expected, r.get()) 192 | 193 | updated = get_model_dict(SimpleModel.objects.get(pk=expected['id'])) 194 | self.assertEquals(expected, updated) 195 | 196 | def testSerializer(self): 197 | """ Test serializer_cls """ 198 | char_val = str(uuid4()) 199 | expected = get_model_dict(self.models[0]) 200 | expected.update(char=char_val) 201 | 202 | serializer_cls = "{}:{}".format(SimpleTaskSerializer.__module__, 203 | SimpleTaskSerializer.__name__) 204 | r = self.task.delay(self.MODEL_SYMBOL, 205 | {'char': char_val, 'id': expected['id']}, 206 | serializer_cls=serializer_cls) 207 | self.assertDictEqual({'id': expected['id']}, r.get()) 208 | 209 | def testNoExistSerializer(self): 210 | """ Test a non-existent serializer """ 211 | char_val = str(uuid4()) 212 | expected = get_model_dict(self.models[0]) 213 | 214 | with self.assertRaises(ImportError): 215 | with unpack_exception(): 216 | self.task.delay(self.MODEL_SYMBOL, 217 | {'char': char_val, 'id': expected['id']}, 218 | serializer_cls='not.existing.symbol').get() 219 | 220 | def testNoExistSerializerRemoteError(self): 221 | """ Perform testNoExistSerializer with remote error handling 222 | toggled to the other mode.""" 223 | old = tasks.rpc.conf['wrap_remote_errors'] 224 | tasks.rpc.conf['wrap_remote_errors'] = not old 225 | return self.testNoExistSerializer() 226 | 227 | def testNoValidSerializer(self): 228 | """ Test an invalid serializer """ 229 | char_val = str(uuid4()) 230 | expected = get_model_dict(self.models[0]) 231 | 232 | with self.assertRaisesRegexp(TypeError, r'not a DRF serializer'): 233 | serializer_cls = 'celery_rpc.tests.models:SimpleModel' 234 | with unpack_exception(): 235 | self.task.delay(self.MODEL_SYMBOL, 236 | {'char': char_val, 'id': expected['id']}, 237 | serializer_cls=serializer_cls).get() 238 | 239 | def testNoValidSerializerRemoteError(self): 240 | """ Perform testNoValidSerializer with remote error handling 241 | toggled to the other mode.""" 242 | old = tasks.rpc.conf['wrap_remote_errors'] 243 | tasks.rpc.conf['wrap_remote_errors'] = not old 244 | return self.testNoValidSerializer() 245 | 246 | 247 | class GetSetTaskTests(SingleObjectsDoesNotExistMixin, BaseTaskTests): 248 | 249 | task = tasks.getset 250 | 251 | def testGetSetOne(self): 252 | new = get_model_dict(self.models[0]) 253 | new.update(char=str(uuid4())) 254 | r = self.task.delay(self.MODEL_SYMBOL, new) 255 | old = get_model_dict(self.models[0]) 256 | self.assertEquals(old, r.get()) 257 | 258 | updated = get_model_dict(SimpleModel.objects.get(pk=old['id'])) 259 | self.assertEquals(new, updated) 260 | 261 | def testGetSetMulti(self): 262 | new = [get_model_dict(e) for e in self.models[0:2]] 263 | for e in new: 264 | e.update(char=str(uuid4())) 265 | r = self.task.delay(self.MODEL_SYMBOL, new) 266 | result = r.get() 267 | self.assertEquals(2, len(result)) 268 | old = [get_model_dict(e) for e in self.models[0:2]] 269 | self.assertEquals(old, result) 270 | 271 | updated = [get_model_dict(o) for o in SimpleModel.objects.all()[0:2]] 272 | self.assertEquals(new, updated) 273 | 274 | def testPartialUpdate(self): 275 | """ Check that getset allows partial model updates 276 | """ 277 | m = factories.PartialUpdateModelFactory() 278 | preserve_f2 = m.f2 279 | expected = randint(1, 1000) 280 | r = self.task.delay('celery_rpc.tests.models:PartialUpdateModel', 281 | {'f1': 
expected, 'pk': m.pk}) 282 | r.get() 283 | m = PartialUpdateModel.objects.get(pk=m.pk) 284 | self.assertEquals(expected, m.f1) 285 | self.assertEquals(preserve_f2, m.f2) 286 | 287 | 288 | class CreateTaskTests(BaseTaskTests): 289 | 290 | task = tasks.create 291 | 292 | def testCreateOne(self): 293 | expected = str(uuid4()) 294 | self.assertEquals(0, SimpleModel.objects.filter(char=expected).count()) 295 | 296 | r = self.task.delay(self.MODEL_SYMBOL, {'char': expected}) 297 | 298 | self.assertEquals(expected, r.get()['char']) 299 | self.assertEquals(1, SimpleModel.objects.filter(char=expected).count()) 300 | 301 | def testCreateMulti(self): 302 | uuids = str(uuid4()), str(uuid4()) 303 | expected = [{'char': v} for v in uuids] 304 | self.assertEquals(0, SimpleModel.objects.filter(char__in=uuids).count()) 305 | 306 | r = self.task.delay(self.MODEL_SYMBOL, expected) 307 | 308 | self.assertEquals(expected, [{'char': i['char']} for i in r.get()]) 309 | self.assertEquals(2, SimpleModel.objects.filter(char__in=uuids).count()) 310 | 311 | def checkSingleObjectDoesNotExist(self, *args): 312 | """ Creates a new object if the provided ID does not exist """ 313 | expected = str(uuid4()) 314 | self.assertEquals(0, SimpleModel.objects.filter(char=expected).count()) 315 | 316 | unexpected_id = randint(100, 1000) 317 | r = self.task.delay(self.MODEL_SYMBOL, {'char': expected, 318 | 'id': unexpected_id}) 319 | 320 | self.assertEquals(expected, r.get()['char']) 321 | self.assertNotEquals(unexpected_id, r.get()['id']) 322 | self.assertEquals(0, SimpleModel.objects.filter( 323 | char=unexpected_id).count()) 324 | self.assertEquals(1, SimpleModel.objects.filter( 325 | char=expected).count()) 326 | 327 | def testSingleObjectAlreadyExist(self): 328 | """ Raise an exception if the object already exists """ 329 | pk = randint(1, 1000) 330 | obj = NonAutoPrimaryKeyModel.objects.create(pk=pk) 331 | with self.assertRaisesRegexp( 332 | ModelTaskError, 333 | r'primary key|PRIMARY KEY|This field must be unique' 334 | r'|with this id already exists') as ctx: 335 | with unpack_exception(): 336 | r = self.task.delay( 337 | 'celery_rpc.tests.models:NonAutoPrimaryKeyModel', 338 | {'id': obj.pk}) 339 | r.get() 340 | self.assertNotEquals(self.models[0].id, ctx.exception.args[1]['id']) 341 | 342 | def testSingleObjectAlreadyExistRemoteError(self): 343 | """ Perform testSingleObjectAlreadyExist with remote error handling 344 | toggled to the other mode.""" 345 | old = tasks.rpc.conf['wrap_remote_errors'] 346 | tasks.rpc.conf['wrap_remote_errors'] = not old 347 | return self.testSingleObjectAlreadyExist() 348 | 349 | 350 | class UpdateOrCreateTaskTests(CreateTaskTests, UpdateTaskTests): 351 | 352 | task = tasks.update_or_create 353 | 354 | def testSingleObjectAlreadyExist(self): 355 | super(UpdateOrCreateTaskTests, self).testUpdateOne() 356 | 357 | 358 | class DeleteTaskTests(SingleObjectsDoesNotExistMixin, BaseTaskTests): 359 | 360 | task = tasks.delete 361 | 362 | def testDeleteOne(self): 363 | expected = get_model_dict(self.models[0]) 364 | 365 | r = self.task.delay(self.MODEL_SYMBOL, expected) 366 | 367 | self.assertEquals(None, r.get()) 368 | self.assertEquals(0, SimpleModel.objects.filter( 369 | id=expected['id']).count()) 370 | 371 | def testDeleteMany(self): 372 | expected = (get_model_dict(self.models[0]), 373 | get_model_dict(self.models[1])) 374 | 375 | r = self.task.delay(self.MODEL_SYMBOL, expected) 376 | 377 | self.assertEquals([], r.get()) 378 | ids = [v['id'] for v in expected] 379 | self.assertEquals(0, 
SimpleModel.objects.filter(id__in=ids).count()) 380 | 381 | 382 | def plus(a, b): 383 | return a + b 384 | 385 | 386 | class CallTaskTests(TestCase): 387 | 388 | def testCallPlus(self): 389 | a = 2 390 | b = 3 391 | expected = a + b 392 | r = tasks.call.delay('celery_rpc.tests.test_tasks:plus', [a, b], 393 | None) 394 | self.assertEquals(expected, r.get()) 395 | 396 | 397 | class OverrideTaskTests(TestCase): 398 | """ Check that overriding the base task class works. 399 | """ 400 | def testOverrideModelTask(self): 401 | self.assertIsInstance(tasks.filter, CustomModelTask) 402 | 403 | 404 | class TranslateTaskTests(BaseTaskTests): 405 | 406 | task = tasks.translate 407 | transform_map = {'title': 'char'} 408 | 409 | def _transform_keys(self, transform_map, data): 410 | result = {} 411 | for new_key, old_key in transform_map.items(): 412 | if old_key in data.keys(): 413 | result[new_key] = data[old_key] 414 | 415 | return result 416 | 417 | def testTransformDict(self): 418 | before = get_model_dict(self.models[0]) 419 | after = self._transform_keys(self.transform_map, before) 420 | 421 | r = self.task.delay(self.transform_map, before) 422 | self.assertEquals(after, r.get()) 423 | 424 | def testTransformList(self): 425 | before = get_model_dict_from_list(self.models) 426 | after = before[:] 427 | for index, el in enumerate(after): 428 | after[index] = self._transform_keys(self.transform_map, el) 429 | 430 | r = self.task.delay(self.transform_map, before) 431 | self.assertEquals(after, r.get()) 432 | 433 | def testTransformWithDefaults(self): 434 | defaults = dict(bart='simpson') 435 | before = get_model_dict(self.models[0]) 436 | after = self._transform_keys(self.transform_map, before) 437 | after.update(defaults) 438 | 439 | r = self.task.delay(self.transform_map, before, defaults=defaults) 440 | self.assertEquals(after, r.get()) 441 | -------------------------------------------------------------------------------- /celery_rpc/tests/tests.py: -------------------------------------------------------------------------------- 1 | """ 2 | Force import of all modules in this package in order to get the standard test 3 | runner to pick up the tests. Yowzers. 
4 | """ 5 | 6 | import os 7 | import django 8 | 9 | modules = [filename.rsplit('.', 1)[0] 10 | for filename in os.listdir(os.path.dirname(__file__)) 11 | if filename.endswith('.py') and not filename.startswith('_')] 12 | __test__ = dict() 13 | 14 | if django.VERSION < (1, 6): 15 | for module in modules: 16 | exec("from celery_rpc.tests.%s import *" % module) -------------------------------------------------------------------------------- /celery_rpc/tests/utils.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from django.core.exceptions import ValidationError 3 | from rest_framework import serializers 4 | 5 | from celery_rpc.tests import factories 6 | from celery_rpc.tests.models import SimpleModel 7 | from celery_rpc import utils 8 | from celery_rpc.base import DRF3, DRF34 9 | 10 | 11 | def get_model_dict(model): 12 | result = model.__dict__.copy() 13 | del result['_state'] 14 | if not DRF3: 15 | return result 16 | model_class = model._meta.model 17 | 18 | class Serializer(serializers.ModelSerializer): 19 | class Meta: 20 | model = model_class 21 | if DRF34: 22 | # implicit fields: DRF 3.4 - deprecated , DRF 3.5 - removed 23 | fields = '__all__' 24 | 25 | s = Serializer(instance=model) 26 | result = s.data 27 | return result 28 | 29 | 30 | def get_model_dict_from_list(models): 31 | result = [] 32 | for model in models: 33 | result.append(get_model_dict(model)) 34 | return result 35 | 36 | 37 | class SimpleModelTestMixin(object): 38 | """ Helper for tests with model needs. 39 | """ 40 | MODEL = SimpleModel 41 | MODEL_FACTORY = factories.SimpleModelFactory 42 | MODEL_SYMBOL = 'celery_rpc.tests.models:SimpleModel' 43 | 44 | def setUp(self): 45 | super(SimpleModelTestMixin, self).setUp() 46 | self.models = self.MODEL_FACTORY.create_batch(5) 47 | 48 | get_model_dict = staticmethod(get_model_dict) 49 | 50 | 51 | class RemoteException(Exception): 52 | pass 53 | 54 | 55 | def fail(*args): 56 | raise ValidationError({"field": "gavno"}) 57 | 58 | 59 | class unpack_exception(object): 60 | def __enter__(self): 61 | pass 62 | 63 | def __exit__(self, exc_type, exc_val, exc_tb): 64 | if not exc_val: 65 | return 66 | 67 | if hasattr(exc_val, 'restore'): 68 | exc_val = exc_val.restore() 69 | inner = utils.unpack_exception(exc_val, True) 70 | exc_val = inner or exc_val 71 | raise exc_val 72 | -------------------------------------------------------------------------------- /celery_rpc/utils.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | import six 4 | from celery import Celery 5 | from kombu import Queue, utils 6 | from six.moves import reduce 7 | 8 | 9 | def create_celery_app(config=None, **opts): 10 | opts.setdefault('main', 'celery-rpc') 11 | app = Celery(**opts) 12 | app.config_from_object('celery_rpc.config') 13 | if config: 14 | app.conf.update(config) 15 | 16 | # Setup queues in accordance with config and overrides 17 | q = app.conf['task_default_queue'] 18 | rk = app.conf['task_default_routing_key'] or q 19 | high_q = q + '.high_priority' 20 | high_rk = rk + '.high_priority' 21 | 22 | app.conf.update( 23 | task_high_priority_queue=high_q, 24 | task_high_priority_routing_key=high_rk, 25 | task_queues=(Queue(q, routing_key=rk), 26 | Queue(high_q, routing_key=high_rk))) 27 | 28 | return app 29 | 30 | 31 | def symbol_by_name(name): 32 | """ Get symbol by qualified name. 
33 | """ 34 | try: 35 | return utils.symbol_by_name(name) 36 | except: 37 | pass 38 | 39 | if ':' in name: 40 | name = name.replace(':', '.') 41 | attrs = name.split('.') 42 | base_module = utils.symbol_by_name(attrs.pop(0)) 43 | return reduce(getattr, attrs, base_module) 44 | 45 | 46 | FILTER_TASK_NAME = 'celery_rpc.filter' 47 | UPDATE_TASK_NAME = 'celery_rpc.update' 48 | GETSET_TASK_NAME = 'celery_rpc.getset' 49 | UPDATE_OR_CREATE_TASK_NAME = 'celery_rpc.update_or_create' 50 | CREATE_TASK_NAME = 'celery_rpc.create' 51 | DELETE_TASK_NAME = 'celery_rpc.delete' 52 | CALL_TASK_NAME = 'celery_rpc.call' 53 | PIPE_TASK_NAME = 'celery_rpc.pipe' 54 | TRANSLATE_TASK_NAME = 'celery_rpc.translate' 55 | RESULT_TASK_NAME = 'celery_rpc.result' 56 | 57 | TASK_NAME_MAP = {n: v for n, v in locals().items() if n.endswith('_TASK_NAME')} 58 | 59 | DEFAULT_EXC_SERIALIZER = 'json' 60 | 61 | 62 | def unpack_exception(error, wrap_errors, serializer=DEFAULT_EXC_SERIALIZER): 63 | """ Extracts original error from RemoteException description 64 | :param error: remote exception stub (or real) instance 65 | :type error: RemoteException 66 | :param wrap_errors: flag for enabling errors unpacking 67 | :type wrap_errors: bool 68 | :return: original error instance, if unpacking is successful; 69 | None otherwise. 70 | :rtype: Exception 71 | """ 72 | if not wrap_errors: 73 | return None 74 | if not error.__class__.__name__ == 'RemoteException': 75 | return None 76 | if not hasattr(error, 'unpack_exception'): 77 | # Stub exception 78 | from celery_rpc.exceptions import RemoteException 79 | error = RemoteException(error.args) 80 | error = error.unpack_exception(serializer) 81 | return error 82 | 83 | 84 | def unproxy(errors): 85 | """ removes ugettext_lazy proxy from ValidationError structure to allow 86 | errors to be serialized with JSON encoder.""" 87 | for k, v in errors.items(): 88 | unproxied = [] 89 | for i in v: 90 | unproxied.append(six.text_type(i)) 91 | errors[k] = unproxied 92 | return errors 93 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Celery==5.2.6 2 | Django==4.0.4 3 | factory-boy==3.2.1 4 | djangorestframework==3.13.1 5 | mock==4.0.3 6 | six==1.16.0 7 | jsonpickle==2.1.0 8 | django_nose==1.4.7 9 | -------------------------------------------------------------------------------- /runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | from optparse import OptionParser 5 | import django 6 | 7 | from django.conf import settings 8 | 9 | if not settings.configured: 10 | from celery_rpc.runtests import settings as test_settings 11 | kwargs = {k: getattr(test_settings, k) for k in dir(test_settings) 12 | if not k.startswith('_') and k.isupper()} 13 | settings.configure(**kwargs) 14 | 15 | 16 | from django_nose import NoseTestSuiteRunner 17 | 18 | if django.VERSION >= (1, 7): 19 | # New Apps loading mechanism 20 | django.setup() 21 | 22 | def runtests(*test_args, **kwargs): 23 | if 'south' in settings.INSTALLED_APPS: 24 | from south.management.commands import patch_for_test_db_setup 25 | patch_for_test_db_setup() 26 | 27 | if not test_args: 28 | test_args = ['celery_rpc'] 29 | 30 | if sys.version_info >= (3, 10, 0): 31 | from django.test.runner import DiscoverRunner 32 | test_runner = DiscoverRunner(**kwargs) 33 | else: 34 | test_runner = NoseTestSuiteRunner(**kwargs) 35 | 36 | failures = 
test_runner.run_tests(test_args) 37 | sys.exit(failures) 38 | 39 | if __name__ == '__main__': 40 | parser = OptionParser() 41 | parser.add_option('--verbosity', dest='verbosity', action='store', 42 | default=1, type=int) 43 | opts = getattr(NoseTestSuiteRunner, 'options', None) 44 | if opts: 45 | parser.add_options(opts) 46 | (options, args) = parser.parse_args() 47 | 48 | runtests(*args, **options.__dict__) 49 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import subprocess 4 | from setuptools import setup, find_packages # type: ignore 5 | # TODO: use pathlib in get_version 6 | base_dir = os.path.dirname(__file__) 7 | 8 | try: 9 | with open(os.path.join(base_dir, 'README.md')) as f: 10 | long_description = f.read() 11 | except OSError: 12 | long_description = None 13 | 14 | version_re = re.compile('^Version: (.+)$', re.M) 15 | package_name = 'djangoceleryrpc' 16 | 17 | 18 | def get_version(): 19 | """ 20 | Reads version from git status or PKG-INFO 21 | 22 | https://gist.github.com/pwithnall/7bc5f320b3bdf418265a 23 | """ 24 | # noinspection PyUnresolvedReferences 25 | git_dir = os.path.join(base_dir, '.git') 26 | if os.path.isdir(git_dir): 27 | # Get the version using "git describe". 28 | cmd = 'git describe --tags --match [0-9]*'.split() 29 | try: 30 | version = subprocess.check_output(cmd).decode().strip() 31 | except subprocess.CalledProcessError: 32 | return None 33 | 34 | # PEP 386 compatibility 35 | if '-' in version: 36 | version = '.post'.join(version.split('-')[:2]) 37 | 38 | # Don't declare a version "dirty" merely because a time stamp has 39 | # changed. If it is dirty, append a ".dev1" suffix to indicate a 40 | # development revision after the release. 41 | with open(os.devnull, 'w') as fd_devnull: 42 | subprocess.call(['git', 'status'], 43 | stdout=fd_devnull, stderr=fd_devnull) 44 | 45 | cmd = 'git diff-index --name-only HEAD'.split() 46 | try: 47 | dirty = subprocess.check_output(cmd).decode().strip() 48 | except subprocess.CalledProcessError: 49 | return None 50 | 51 | if dirty != '': 52 | version += '.dev1' 53 | else: 54 | # Extract the version from the PKG-INFO file. 
55 | try: 56 | with open('PKG-INFO') as v: 57 | version = version_re.search(v.read()).group(1) 58 | except OSError: 59 | version = None 60 | 61 | return version 62 | 63 | 64 | setup( 65 | name=package_name, 66 | version=get_version() or '0.0.0-dev', 67 | long_description=long_description, 68 | long_description_content_type='text/markdown', 69 | packages=find_packages(), 70 | include_package_data=True, 71 | url='https://github.com/just-work/django-celery-rpc', 72 | license='Public', 73 | author='axeman', 74 | author_email='alex.manaev@gmail.com', 75 | description='Remote access from one system to models and functions of ' 76 | 'another one using Celery machinery.', 77 | install_requires=[ 78 | 'celery >=3.1.5, <5.3.0', 79 | 'jsonpickle >=0.8.0, <2.1.0', 80 | 'six', 81 | ], 82 | extras_require={ 83 | 'server': [ 84 | 'django >=1.3, <4.1', 85 | 'djangorestframework >= 2.3, <3.14', 86 | ], 87 | }, 88 | tests_require=[ 89 | 'nose>=1.0', 90 | 'django >=1.3, <4.1', 91 | 'djangorestframework >= 2.3, <3.13', 92 | 'django-nose >= 1.2, <1.5', 93 | 'factory-boy==2.8.1', 94 | 'mock', 95 | ], 96 | test_suite='runtests.runtests', 97 | classifiers=[ 98 | 'Development Status :: 6 - Mature', 99 | 'Environment :: Console', 100 | 'Intended Audience :: Developers', 101 | 'Framework :: Django', 102 | 'Framework :: Django :: 1.4', 103 | 'Framework :: Django :: 1.5', 104 | 'Framework :: Django :: 1.6', 105 | 'Framework :: Django :: 1.7', 106 | 'Framework :: Django :: 1.8', 107 | 'Framework :: Django :: 1.9', 108 | 'Framework :: Django :: 1.10', 109 | 'Framework :: Django :: 1.11', 110 | 'Framework :: Django :: 2.0', 111 | 'Framework :: Django :: 2.1', 112 | 'Framework :: Django :: 2.2', 113 | 'Framework :: Django :: 3.0', 114 | 'Framework :: Django :: 3.1', 115 | 'Framework :: Django :: 3.2', 116 | 'Framework :: Django :: 4.0', 117 | 'Operating System :: POSIX', 118 | 'Programming Language :: Python :: 2.7', 119 | 'Programming Language :: Python :: 3.5', 120 | 'Programming Language :: Python :: 3.6', 121 | 'Programming Language :: Python :: 3.7', 122 | 'Programming Language :: Python :: 3.8', 123 | 'Programming Language :: Python :: 3.9', 124 | 'Programming Language :: Python :: 3.10', 125 | ] 126 | ) 127 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | {py3.8,py3.9,py3.10}-django{4.0}-drf{3.11,3.12,3.13}-celery{5.2}-{importlib}-{setuptools} 4 | {py3.7,py3.8,py3.9,py3.10}-django{3.2}-drf{3.11,3.12,3.13}-celery{5.2}-{importlib}-{setuptools} 5 | {py3.6,py3.7,py3.8,py3.9,py3.10}-django{3.2}-drf{3.11,3.12,3.13}-celery{4.4,5.0,5.1}-{importlib}-{setuptools} 6 | {py3.6,py3.7,py3.8,py3.9,py3.10}-django{3.1}-drf{3.11,3.12,3.13}-celery{4.4,5.0,5.1}-{importlib}-{setuptools} 7 | {py3.6,py3.7,py3.8,py3.9,py3.10}-django{3.0}-drf{3.10,3.11,3.12,3.13}-celery{4.4,5.0,5.1}-{importlib}-{setuptools} 8 | {py3.5,py3.6,py3.7,py3.8,py3.10}-django{2.2}-drf{3.10,3.11,3.12}-celery{4.4}-{importlib}-{setuptools} 9 | {py3.5,py3.6,py3.7,py3.8}-django{2.1}-drf{3.10,3.11}-celery{4.4}-{importlib}-{setuptools} 10 | {py3.5,py3.6,py3.7,py3.8}-django{2.0}-drf{3.10,3.11}-celery{4.4}-{importlib}-{setuptools} 11 | {py2.7,py3.5}-django{1.11}-drf{3.7,3.6,3.5}-celery{4.4}-{importlib}-{setuptools} 12 | 13 | 14 | [gh-actions] 15 | python = 16 | 2.7: py2.7 17 | 3.5: py3.5 18 | 3.6: py3.6 19 | 3.7: py3.7 20 | 3.8: py3.8 21 | 3.9: py3.9 22 | 3.10: py3.10 23 | 24 | [testenv] 25 | basepython = 26 | py2.7: 
python2.7 27 | py3.5: python3.5 28 | py3.6: python3.6 29 | py3.7: python3.7 30 | py3.8: python3.8 31 | py3.9: python3.9 32 | py3.10: python3.10 33 | 34 | deps = 35 | django1.11: Django>=1.11,<2.0 36 | django2.0: Django>=2.0,<2.1 37 | django2.1: Django>=2.1,<2.2 38 | django2.2: Django>=2.2,<3.0 39 | django3.0: Django>=3.0,<3.1 40 | django3.1: Django>=3.1,<3.2 41 | django3.2: Django>=3.2,<3.3 42 | django4.0: Django>=4.0,<4.1 43 | drf2.3: djangorestframework>=2.3,<2.4 44 | drf2.4: djangorestframework>=2.4,<2.5 45 | drf3.0: djangorestframework>=3.0,<3.1 46 | drf3.1: djangorestframework>=3.1,<3.2 47 | drf3.2: djangorestframework>=3.2,<3.3 48 | drf3.3: djangorestframework>=3.3,<3.4 49 | drf3.4: djangorestframework>=3.4,<3.5 50 | drf3.5: djangorestframework>=3.5,<3.6 51 | drf3.6: djangorestframework>=3.6,<3.7 52 | drf3.7: djangorestframework>=3.7.4,<3.8 53 | drf3.8: djangorestframework>=3.8,<3.9 54 | drf3.9: djangorestframework>=3.9,<3.10 55 | drf3.10: djangorestframework>=3.10,<3.11 56 | drf3.11: djangorestframework>=3.11,<3.12 57 | drf3.12: djangorestframework>=3.12,<3.13 58 | drf3.13: djangorestframework>=3.13,<3.14 59 | celery4.4: celery>=4.4,<4.5 60 | celery5.0: celery>=5.0,<5.1 61 | celery5.1: celery>=5.1,<5.2 62 | celery5.2: celery>=5.2,<5.3 63 | py{3.7}-importlib: importlib-metadata<5.0 64 | setuptools: setuptools<=65.7.0 65 | commands = python setup.py test 66 | --------------------------------------------------------------------------------