├── tests
├── __init__.py
├── dummyapp
│ ├── __init__.py
│ └── models.py
├── settings.py
├── conftest.py
├── test_decorators.py
├── test_utils.py
├── test_jobs.py
└── test_base_job.py
├── cacheback
├── models.py
├── apps.py
├── rq_tasks.py
├── tasks.py
├── __init__.py
├── decorators.py
├── utils.py
├── jobs.py
└── base.py
├── sandbox
├── dummyapp
│ ├── __init__.py
│ ├── migrations
│ │ ├── __init__.py
│ │ └── 0001_initial.py
│ ├── models.py
│ ├── templates
│ │ └── index.html
│ ├── jobs.py
│ └── views.py
├── requirements.txt
├── __init__.py
├── Procfile.rq
├── urls.py
├── Procfile.celery
├── manage.py
├── celeryconfig.py
├── provision.sh
├── wsgi.py
├── settings.py
└── fixture.json
├── .python-version
├── .readthedocs.yml
├── .gitignore
├── Vagrantfile
├── Makefile
├── .editorconfig
├── tox.ini
├── docs
├── settings.rst
├── advanced.rst
├── contributing.rst
├── api.rst
├── installation.rst
├── Makefile
├── usage.rst
├── index.rst
└── conf.py
├── LICENSE
├── .github
└── workflows
│ └── test.yml
├── README.rst
├── pyproject.toml
└── CHANGELOG.rst
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/cacheback/models.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/sandbox/dummyapp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/dummyapp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/sandbox/dummyapp/migrations/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.8.5
2 | 3.7.8
3 | 3.6.11
4 |
--------------------------------------------------------------------------------
/sandbox/requirements.txt:
--------------------------------------------------------------------------------
1 | python-memcached>=1.59
2 |
--------------------------------------------------------------------------------
/sandbox/__init__.py:
--------------------------------------------------------------------------------
# Import the Celery app when Django starts so that shared tasks are bound
# to this application instance (standard Celery/Django integration pattern).
from .celeryconfig import app as celery_app

__all__ = ('celery_app',)
4 |
--------------------------------------------------------------------------------
/sandbox/Procfile.rq:
--------------------------------------------------------------------------------
1 | web: env QUEUE=rq poetry run python manage.py runserver 0.0.0.0:8000
2 | worker: env QUEUE=rq poetry run python manage.py rqworker
3 |
--------------------------------------------------------------------------------
/sandbox/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import path
2 |
3 | from dummyapp import views
4 |
5 |
6 | urlpatterns = [
7 | path('', views.index, name='index'),
8 | ]
9 |
--------------------------------------------------------------------------------
/sandbox/Procfile.celery:
--------------------------------------------------------------------------------
1 | web: env QUEUE=celery poetry run python manage.py runserver 0.0.0.0:8000
2 | worker: env QUEUE=celery poetry run celery -A sandbox worker --loglevel=INFO
3 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | build:
2 | image: latest
3 |
4 | python:
5 | version: 3.8
6 | pip_install: true
7 | extra_requirements:
8 | - docs
9 | - celery
10 | - rq
11 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .vagrant
2 | sandbox/db.sqlite3
3 | docs/_build
4 | dist/*
5 | build/*
6 | .coverage
7 | htmlcov/
8 | TODO
9 | .tox
10 | __pycache__
11 | *.egg-info
12 | .pytest_cache
13 | *.lock
14 |
--------------------------------------------------------------------------------
/cacheback/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 | from django.utils.translation import gettext_lazy as _
3 |
4 |
class CachebackConfig(AppConfig):
    """Django application configuration for the cacheback app."""

    name = 'cacheback'
    verbose_name = _('Cacheback')
8 |
--------------------------------------------------------------------------------
/Vagrantfile:
--------------------------------------------------------------------------------
# -*- mode: ruby -*-
# vi: set ft=ruby :

# NOTE: the previous `Vagrant::Config.run` / `forward_port` calls were the
# long-removed Vagrant v1 API and fail on any modern Vagrant release; the
# `ubuntu/focal64` box and keyword-style provisioner used here require the
# version-2 configuration block anyway.
Vagrant.configure("2") do |config|
  config.vm.box = "ubuntu/focal64"
  # Expose the Django runserver (guest port 8000) as port 8080 on the host.
  config.vm.network "forwarded_port", guest: 8000, host: 8080
  config.vm.provision "shell", path: "sandbox/provision.sh", privileged: false
end
9 |
--------------------------------------------------------------------------------
/cacheback/rq_tasks.py:
--------------------------------------------------------------------------------
1 | from django_rq import job
2 |
3 |
@job
def refresh_cache(klass_str, obj_args, obj_kwargs, call_args, call_kwargs):
    """RQ task that re-runs a job's fetch and repopulates the cache entry."""
    # Imported lazily to avoid a circular import between base and this module.
    from . import base

    base.Job.perform_async_refresh(klass_str, obj_args, obj_kwargs, call_args, call_kwargs)
9 |
--------------------------------------------------------------------------------
/sandbox/dummyapp/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 |
class DummyModel(models.Model):
    """Trivial model used by the sandbox views to exercise cacheback."""

    name = models.CharField(max_length=100)
    date_created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        # Was `__unicode__`, which is never called under Python 3;
        # `__str__` is the supported hook on Django 2.2+.
        return self.name
--------------------------------------------------------------------------------
/sandbox/dummyapp/templates/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | Testing
4 |
5 |
6 | Items
7 |
8 | {% for item in items %}
9 | - {{ item }}
10 | {% endfor %}
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/tests/dummyapp/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 |
class DummyModel(models.Model):
    """Minimal model used as a fixture target in the test suite."""

    name = models.CharField(max_length=100)
    date_created = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Deterministic ordering so queryset-based assertions are stable.
        ordering = ('name',)
10 |
--------------------------------------------------------------------------------
/sandbox/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
if __name__ == "__main__":
    # Point Django at the sandbox settings module unless one is already set.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

    # Imported after the settings env var is configured.
    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
11 |
--------------------------------------------------------------------------------
/sandbox/celeryconfig.py:
--------------------------------------------------------------------------------
import os

from celery import Celery

# The settings module must be configured before django.conf is imported below.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

from django.conf import settings  # noqa

app = Celery('sandbox')

# Read all CELERY_*-prefixed options from the Django settings module and
# discover task modules in the installed apps.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
13 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# `pytests` and `coverage-html` were missing from .PHONY, so files with
# those names would silently shadow the targets.
.PHONY: correct pytests tests coverage-html

# Auto-format the source tree.
correct:
	poetry run isort cacheback tests
	poetry run black -q cacheback tests

# Run only the actual tests.
pytests:
	@PYTHONPATH=$(CURDIR):${PYTHONPATH} poetry run pytest

# Run tests plus linters (coverage, isort, flake8, black).
tests:
	@PYTHONPATH=$(CURDIR):${PYTHONPATH} poetry run pytest --cov --isort --flake8 --black

coverage-html: tests
	poetry run coverage html
15 |
--------------------------------------------------------------------------------
/cacheback/tasks.py:
--------------------------------------------------------------------------------
1 | from celery import shared_task
2 | from django.conf import settings
3 |
4 |
@shared_task(ignore_result=getattr(settings, 'CACHEBACK_TASK_IGNORE_RESULT', False))
def refresh_cache(klass_str, obj_args, obj_kwargs, call_args, call_kwargs):
    """Celery task that re-runs a job's fetch and repopulates the cache entry."""
    # Imported lazily to avoid a circular import between base and this module.
    from . import base

    base.Job.perform_async_refresh(klass_str, obj_args, obj_kwargs, call_args, call_kwargs)
10 |
--------------------------------------------------------------------------------
/cacheback/__init__.py:
--------------------------------------------------------------------------------
try:
    import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
    # This is required for Python versions < 3.8
    import importlib_metadata

try:
    # Keep __version__ in sync with the installed distribution metadata.
    __version__ = importlib_metadata.version('django-cacheback')
except Exception:
    # Metadata unavailable, e.g. running from a plain source checkout.
    __version__ = 'HEAD'

# Used by Django < 3.2 to locate the AppConfig automatically.
default_app_config = 'cacheback.apps.CachebackConfig'
13 |
--------------------------------------------------------------------------------
/sandbox/provision.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | sudo apt-get update
4 | sudo apt-get install -y redis-server memcached python3-pip git
5 | sudo pip3 install -U pip poetry honcho
6 |
7 | cd /vagrant
8 | poetry install
9 | poetry run pip install -r sandbox/requirements.txt
10 |
11 | # Create and fill database
12 | cd /vagrant/sandbox
13 | poetry run python manage.py migrate
14 | poetry run python manage.py loaddata fixture.json
15 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | indent_style = tab
6 | end_of_line = lf
7 | trim_trailing_whitespace = true
8 | insert_final_newline = true
9 |
10 | [*.py]
11 | indent_style = space
12 | indent_size = 4
13 | multi_line_output = 3
14 | lines_after_imports = 2
15 | include_trailing_comma = True
16 | ensure_newline_before_comments = True
17 | force_grid_wrap = 0
18 | use_parentheses = True
19 | line_length = 96
20 |
21 | [*.rst]
22 | indent_style = space
23 | indent_size = 4
24 |
25 | [*.yml]
26 | indent_style = space
27 | indent_size = 2
28 |
--------------------------------------------------------------------------------
/sandbox/dummyapp/jobs.py:
--------------------------------------------------------------------------------
1 | from cacheback.base import Job
2 |
3 | from dummyapp import models
4 |
5 |
class VanillaJob(Job):
    """Cache job with a deliberately slow fetch to demonstrate async refresh."""

    # Never block the request on a cache miss; enqueue a refresh instead.
    fetch_on_miss = False
    refresh_timeout = 5

    def fetch(self):
        import time
        # Simulate an expensive data load.
        time.sleep(10)
        return models.DummyModel.objects.all()
14 |
15 |
class KeyedJob(Job):
    """Cache job whose cache key is the model name being filtered on."""

    lifetime = 5
    fetch_on_stale_threshold = 10

    def key(self, name):
        # The name argument alone identifies the cached result.
        return name

    def fetch(self, name):
        return models.DummyModel.objects.filter(name=name)
25 |
--------------------------------------------------------------------------------
/sandbox/dummyapp/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9 on 2015-12-12 20:02
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations, models
6 |
7 |
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the DummyModel table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='DummyModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('date_created', models.DateTimeField(auto_now_add=True)),
            ],
        ),
    ]
25 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | isolated_build = true
3 | envlist =
4 | py{36,37,38,39}-django{22,30,31}
5 | py{36,37,38,39,310}-django{32}
6 | py{38,39,310}-django{40}
7 |
8 | [gh-actions]
9 | python =
10 | 3.6: py36
11 | 3.7: py37
12 | 3.8: py38
13 | 3.9: py39
14 | 3.10: py310
15 |
16 | [testenv]
17 | setenv = PYTHONPATH={toxinidir}
18 | deps =
19 | django22: Django>=2.2,<2.3
20 | django30: Django>=3.0,<3.1
21 | django31: Django>=3.1,<3.2
22 | django32: Django>=3.2,<3.3
23 | django40: Django>=4.0,<4.1
24 | allowlist_externals =
25 | poetry
26 | sh
27 | skip_install = true
28 | commands =
29 | poetry export --dev --without-hashes -o {toxworkdir}/requirements.txt
30 | sh -c 'grep -v "^[dD]jango==" {toxworkdir}/requirements.txt | poetry run pip install --no-deps -r /dev/stdin'
31 | pytest --isort --flake8 --black --cov
32 |
--------------------------------------------------------------------------------
/docs/settings.rst:
--------------------------------------------------------------------------------
1 | ========
2 | Settings
3 | ========
4 |
5 | ``CACHEBACK_CACHE_ALIAS``
6 | -------------------------
7 |
8 | This specifies which cache to use from your ``CACHES`` setting. It defaults to
9 | ``default``.
10 |
11 |
12 | ``CACHEBACK_VERIFY_CACHE_WRITE``
13 | --------------------------------
14 |
15 | This verifies the data is correctly written to memcache. If not, then a
16 | ``RuntimeError`` is raised. Defaults to ``True``.
17 |
18 |
19 | ``CACHEBACK_TASK_QUEUE``
20 | ------------------------
21 |
22 | This defines the task queue to use. Valid options are ``rq`` and ``celery``.
23 | Make sure that the corresponding task queue is configured too.
24 |
25 |
26 | ``CACHEBACK_TASK_IGNORE_RESULT``
27 | --------------------------------
28 |
29 | This specifies whether to ignore the result of the ``refresh_cache`` task
30 | and prevent Celery/RQ from storing it into its results backend.
31 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (C) 2012 django-cacheback authors (see AUTHORS file)
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of
4 | this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to
6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
7 | of the Software, and to permit persons to whom the Software is furnished to do
8 | so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
20 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: CI
3 |
4 | on:
5 | push:
6 | branches: ["master"]
7 | pull_request:
8 | branches: ["master"]
9 | workflow_dispatch:
10 |
11 | jobs:
12 | tests:
13 | name: "Python ${{ matrix.python-version }}"
14 | runs-on: "ubuntu-20.04"
15 | services:
16 | redis:
17 | image: redis
18 | options: >-
19 | --health-cmd "redis-cli ping"
20 | --health-interval 10s
21 | --health-timeout 5s
22 | --health-retries 5
23 | ports:
24 | - 6379:6379
25 | strategy:
26 | matrix:
27 | python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
28 | steps:
29 | - uses: "actions/checkout@v2"
30 | - uses: "actions/setup-python@v2"
31 | with:
32 | python-version: "${{ matrix.python-version }}"
33 | - name: "Install dependencies"
34 | run: |
35 | set -xe
36 | python -m pip install --upgrade pip setuptools
37 | python -m pip install --upgrade poetry tox tox-gh-actions
38 |
39 | - name: "Run tox targets for ${{ matrix.python-version }}"
40 | run: "python -m tox"
41 |
--------------------------------------------------------------------------------
/docs/advanced.rst:
--------------------------------------------------------------------------------
1 | Advanced usage
2 | --------------
3 |
4 | Three thresholds for cache invalidation
5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
6 |
7 | It's possible to employ three threshold times to control cache behaviour:
8 |
9 | 1. A time after which the cached item is considered 'stale'. When a stale item
10 | is returned, an async job is triggered to refresh the item but the stale item
11 | is returned. This is controlled by the ``lifetime`` attribute of the
12 | ``Job`` class - the default value is 600 seconds (10 minutes).
13 |
14 | 2. A time after which the cached item is removed (a cache miss). If you have
15 | ``fetch_on_miss=True``, then this will trigger a synchronous data fetch.
16 | This is controlled by the ``cache_ttl`` attribute of the ``Job`` class - the
17 | default value is 2592000 seconds, which is the maximum ttl that memcached
18 | supports.
19 |
20 | 3. A timeout value for the refresh job. If the cached item is not refreshed
21 | after this time, then another async refresh job will be triggered. This is
22 | controlled by the ``refresh_timeout`` attribute of the ``Job`` class and
23 | defaults to 60 seconds.
24 |
--------------------------------------------------------------------------------
/tests/settings.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import tempfile
3 |
4 |
5 | SECRET_KEY = 'testing'
6 |
7 | DATABASES = {
8 | 'default': {
9 | 'ENGINE': 'django.db.backends.sqlite3',
10 | 'NAME': ':memory:',
11 | }
12 | }
13 |
14 | CACHES = {
15 | 'default': {
16 | 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
17 | 'LOCATION': tempfile.mkdtemp(),
18 | },
19 | 'secondary': {
20 | 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
21 | 'LOCATION': tempfile.mkdtemp(),
22 | },
23 | 'dummy': {
24 | 'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
25 | },
26 | }
27 |
28 | INSTALLED_APPS = [
29 | # 'django.contrib.auth',
30 | # 'django.contrib.admin',
31 | # 'django.contrib.contenttypes',
32 | # 'django.contrib.sessions',
33 | # 'django.contrib.sites',
34 | # 'django.contrib.flatpages',
35 | 'cacheback',
36 | 'tests.dummyapp',
37 | ]
38 |
39 | BROKER_URL = 'django://'
40 |
41 | RQ_QUEUES = {
42 | 'default': {
43 | 'HOST': 'localhost',
44 | 'PORT': 6379,
45 | 'DB': 1,
46 | }
47 | }
48 |
49 | CACHEBACK_TASK_QUEUE = 'rq'
50 |
51 |
52 | logging.disable(logging.CRITICAL)
53 |
--------------------------------------------------------------------------------
/sandbox/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for sandbox project.
3 |
4 | This module contains the WSGI application used by Django's development server
5 | and any production WSGI deployments. It should expose a module-level variable
6 | named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
7 | this application via the ``WSGI_APPLICATION`` setting.
8 |
9 | Usually you will have the standard Django WSGI application here, but it also
10 | might make sense to replace the whole Django WSGI application with a custom one
11 | that later delegates to the Django one. For example, you could introduce WSGI
12 | middleware here, or combine a Django application with an application of another
13 | framework.
14 |
15 | """
16 | import os
17 |
18 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
19 |
20 | # This application object is used by any WSGI server configured to use this
21 | # file. This includes Django's development server, if the WSGI_APPLICATION
22 | # setting points here.
23 | from django.core.wsgi import get_wsgi_application
24 | application = get_wsgi_application()
25 |
26 | # Apply WSGI middleware here.
27 | # from helloworld.wsgi import HelloWorldApplication
28 | # application = HelloWorldApplication(application)
29 |
--------------------------------------------------------------------------------
/sandbox/dummyapp/views.py:
--------------------------------------------------------------------------------
1 | from django.shortcuts import render
2 |
3 | from cacheback.jobs import QuerySetFilterJob
4 | from cacheback.jobs import FunctionJob
5 | from cacheback.decorators import cacheback
6 |
7 | from dummyapp import jobs
8 | from dummyapp import models
9 |
10 |
def fetch():
    """Return dummy models whose name contains '1' (sample FunctionJob target)."""
    return models.DummyModel.objects.filter(name__contains='1')
13 |
14 |
def fetch_with_arg(q):
    """Return dummy models whose name contains *q* (parameterized FunctionJob target)."""
    return models.DummyModel.objects.filter(name__contains=q)
17 |
18 |
@cacheback(5)
def decorated(q):
    """Cached (5s lifetime) lookup of dummy models whose name contains *q*."""
    return models.DummyModel.objects.filter(name__contains=q)
22 |
23 |
def index(request):
    """Render the index page, exercising the various cacheback entry points.

    Query-string parameters select which API is demonstrated:
    ``name`` (optionally with ``qs``), ``function`` (optionally with ``q``),
    ``decorator``, or none (plain Job subclass).
    """
    params = request.GET
    if 'name' in params:
        lookup = params['name']
        if 'qs' in params:
            # Generic queryset job: model class, lifetime, fetch_on_miss.
            items = QuerySetFilterJob(models.DummyModel, 10, False).get(name=lookup)
        else:
            items = jobs.KeyedJob().get(name=lookup)
    elif 'function' in params:
        job = FunctionJob()
        job.fetch_on_miss = False
        items = job.get(fetch_with_arg, params['q']) if 'q' in params else job.get(fetch)
    elif 'decorator' in params:
        items = decorated('3')
    else:
        items = jobs.VanillaJob().get()
    return render(request, 'index.html', {'items': items})
44 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import django_rq
2 | import pytest
3 | import redis
4 | from django.conf import settings
5 | from django.core.cache import cache
6 |
7 |
def skip_if_no_redis():
    """Skip the current test when no Redis server is reachable.

    The availability probe is performed once per test session and memoized
    on the function object.

    Fixes two bugs: the memoization guard checked ``_test_result`` while the
    result was stored under ``_redis_available`` (so the ping re-ran on every
    call), and the port was read with the misspelled key ``'POST'`` instead
    of ``'PORT'``.
    """
    if not hasattr(skip_if_no_redis, '_redis_available'):
        try:
            redis.StrictRedis(
                settings.RQ_QUEUES['default'].get('HOST', 'localhost'),
                settings.RQ_QUEUES['default'].get('PORT', 6379),
            ).ping()
            skip_if_no_redis._redis_available = True
        except redis.ConnectionError:
            skip_if_no_redis._redis_available = False

    if not skip_if_no_redis._redis_available:
        pytest.skip('Redis server not available.')
21 |
22 |
def pytest_runtest_setup(item):
    """Skip tests marked ``redis_required`` when Redis is unavailable."""
    marker = item.get_closest_marker('redis_required')
    if marker:
        skip_if_no_redis()
26 |
27 |
@pytest.fixture(scope='session', autouse=True)
def session_clear_cache(request):
    """Ensure the cache is empty before and after the whole test session."""
    cache.clear()
    request.addfinalizer(cache.clear)
32 |
33 |
@pytest.fixture()
def cleared_cache(request):
    # Clears the cache before a test that explicitly requests a clean slate.
    cache.clear()
37 |
38 |
@pytest.fixture
def rq_worker(request):
    """Provide an RQ worker whose queues are emptied before and after the test.

    ``pytest.yield_fixture`` is deprecated (and removed in modern pytest);
    plain ``pytest.fixture`` supports yield fixtures. The side-effecting
    list comprehensions are replaced with explicit loops.
    """
    for queue in django_rq.get_worker().queues:
        queue.empty()

    worker = django_rq.get_worker()

    yield worker

    for queue in django_rq.get_worker().queues:
        queue.empty()
48 |
49 |
@pytest.fixture
def rq_burst(request, rq_worker):
    """Provide a callable that drains the RQ queues in burst mode.

    ``pytest.yield_fixture`` is deprecated (and removed in modern pytest);
    plain ``pytest.fixture`` supports yield fixtures.
    """

    def burst():
        rq_worker.work(burst=True)

    yield burst
56 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Contributing
3 | ============
4 |
5 | Make sure to have `poetry` installed. Then, start by cloning the repo,
6 | and installing the dependencies::
7 |
8 | $ pip install poetry # if not already installed
    $ cd django-cacheback
10 | $ poetry install
11 |
12 |
13 | Running tests
14 | =============
15 |
16 | Use::
17 |
18 | # only runs actual tests
19 | $ make pytests
20 |
21 | or::
22 |
23 | # runs tests but also linters like black, isort and flake8
24 | $ make tests
25 |
26 |
27 | To generate html coverage::
28 |
29 | $ make coverage-html
30 |
31 |
32 | Finally, you can also use tox to run tests against
33 | all supported Django and Python versions::
34 |
35 | $ tox
36 |
37 |
38 | Sandbox VM
39 | ==========
40 |
There is a ``Vagrantfile`` for setting up a sandbox VM where you can play around
with the functionality. Bring up the Vagrant box::
43 |
44 | $ vagrant up
45 |
This may take a while but will set up an Ubuntu Focal VM with Redis and
memcached installed. You can then SSH into the machine::
48 |
49 | $ vagrant ssh
50 | $ cd /vagrant/sandbox
51 |
52 | You can now decide to run the Celery implementation::
53 |
54 | $ honcho -f Procfile.celery start
55 |
56 | Or you can run the RQ implementation::
57 |
58 | $ honcho -f Procfile.rq start
59 |
60 | The above commands will start a Django runserver and the selected task worker.
61 | The dummy site will be available at ``http://localhost:8080`` on your host
62 | machine. There are some sample views in ``sandbox/dummyapp/views.py`` that
63 | exercise django-cacheback.
64 |
--------------------------------------------------------------------------------
/cacheback/decorators.py:
--------------------------------------------------------------------------------
1 | from functools import WRAPPER_ASSIGNMENTS, wraps
2 |
3 | from .jobs import FunctionJob
4 |
5 |
def cacheback(
    lifetime=None,
    fetch_on_miss=None,
    cache_alias=None,
    job_class=None,
    task_options=None,
    **job_class_kwargs
):
    """
    Decorate function to cache its return value.

    :lifetime: How long to cache items for
    :fetch_on_miss: Whether to perform a synchronous fetch when no cached
        result is found
    :cache_alias: The Django cache alias to store the result into.
    :job_class: The class to use for running the cache refresh job. Defaults
        using the FunctionJob.
    :task_options: Passed through to the job_class constructor; presumably
        forwarded to the task queue when enqueuing refreshes — TODO confirm
        against the Job base class.
    :job_class_kwargs: Any extra kwargs to pass to job_class constructor.
        Useful with custom job_class implementations.
    """
    if job_class is None:
        job_class = FunctionJob
    # A single job instance is shared by every call to the decorated function.
    job = job_class(
        lifetime=lifetime,
        fetch_on_miss=fetch_on_miss,
        cache_alias=cache_alias,
        task_options=task_options,
        **job_class_kwargs
    )

    def _wrapper(fn):
        # NOTE: this historically used Django's available_attrs to work
        # around http://bugs.python.org/issue3445 (fixed in Python 3);
        # WRAPPER_ASSIGNMENTS is now passed directly.
        @wraps(fn, assigned=WRAPPER_ASSIGNMENTS)
        def __wrapper(*args, **kwargs):
            return job.get(fn, *args, **kwargs)

        # Assign reference to unwrapped function so that we can access it
        # later without descending into infinite regress.
        __wrapper.fn = fn
        # Assign reference to job so we can use the full Job API
        __wrapper.job = job
        return __wrapper

    return _wrapper
50 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | ===
2 | API
3 | ===
4 |
5 | Jobs
6 | ====
7 |
8 | The main class is ``cacheback.base.Job``. The methods that are intended to be
9 | called from client code are:
10 |
11 | .. autoclass:: cacheback.base.Job
12 | :members: get, invalidate, delete
13 |
14 | It has some class properties than can be used to configure simple behaviour:
15 |
16 | .. autoclass:: cacheback.base.Job
17 | :noindex:
18 | :members: lifetime, refresh_timeout, cache_alias, fetch_on_miss, fetch_on_stale_threshold, task_options
19 |
20 | There are also several methods intended to be overridden and customised:
21 |
22 | .. autoclass:: cacheback.base.Job
23 | :noindex:
24 | :members: key, fetch, expiry, should_missing_item_be_fetched_synchronously, should_stale_item_be_fetched_synchronously, empty, prepare_args, prepare_kwargs, timeout, process_result
25 |
26 |
27 | Queryset jobs
28 | =============
29 |
30 | There are two classes for easy caching of ORM reads. These don't need
31 | subclassing but rather take the model class as a ``__init__`` parameter.
32 |
33 | .. autoclass:: cacheback.jobs.QuerySetFilterJob
34 | :members:
35 |
36 | .. autoclass:: cacheback.jobs.QuerySetGetJob
37 | :members:
38 |
39 |
40 | Example usage:
41 |
42 | .. sourcecode:: python
43 |
44 | from django.contrib.auth import models
45 | from django.shortcuts import render
46 | from cacheback.jobs import QuerySetGetJob, QuerySetFilterJob
47 |
48 | def user_detail(request, username):
49 | user = QuerySetGetJob(models.User).get(username=username)
50 | return render(request, 'user.html', {'user': user})
51 |
52 | def staff(request):
53 | staff = QuerySetFilterJob(models.User).get(is_staff=True)
54 | return render(request, 'staff.html', {'users': staff})
55 |
56 | These classes are helpful for simple ORM reads but won't be suitable for more
57 | complicated queries where ``filter`` is chained together with ``exclude``.
58 |
--------------------------------------------------------------------------------
/cacheback/utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from django.conf import settings
4 | from django.core.exceptions import ImproperlyConfigured
5 |
6 |
7 | try:
8 | import importlib
9 | except ImportError:
10 | import django.utils.importlib as importlib
11 |
12 | try:
13 | from .tasks import refresh_cache as celery_refresh_cache
14 | except ImportError:
15 | celery_refresh_cache = None
16 |
17 | try:
18 | import django_rq
19 |
20 | from .rq_tasks import refresh_cache as rq_refresh_cache
21 | except ImportError:
22 | django_rq = None
23 | rq_refresh_cache = None
24 |
25 |
26 | logger = logging.getLogger('cacheback')
27 |
28 |
def get_job_class(klass_str):
    """Resolve a dotted path like ``'pkg.module.JobClass'`` to the class.

    Logs an error and returns ``None`` when the module cannot be imported
    or the class is not defined on it.
    """
    module_path, class_name = klass_str.rsplit('.', 1)

    try:
        module = importlib.import_module(module_path)
    except ImportError as exc:
        logger.error("Error importing job module %s: '%s'", module_path, exc)
        return None

    try:
        return getattr(module, class_name)
    except AttributeError:
        logger.error("Module '%s' does not define a '%s' class", module_path, class_name)
        return None
45 |
46 |
def enqueue_task(kwargs, task_options=None):
    """Enqueue an async cache-refresh task on the configured task queue.

    :kwargs: Keyword arguments passed through to the ``refresh_cache`` task.
    :task_options: Backend-specific options — kwargs for
        ``django_rq.get_queue`` when RQ is used, or for Celery's
        ``apply_async`` otherwise.
    :raises ImproperlyConfigured: when the configured queue name is unknown
        or the matching backend could not be imported (see the guarded
        imports at the top of this module).
    """
    task_queue = getattr(settings, 'CACHEBACK_TASK_QUEUE', 'celery')

    if task_queue == 'rq' and rq_refresh_cache is not None:
        queue = django_rq.get_queue(**task_options or {})
        return queue.enqueue_call(
            rq_refresh_cache,
            kwargs=kwargs,
            # Discard the RQ job result immediately when results are ignored.
            result_ttl=0 if getattr(settings, 'CACHEBACK_TASK_IGNORE_RESULT', False) else None,
        )

    elif task_queue == 'celery' and celery_refresh_cache is not None:
        return celery_refresh_cache.apply_async(kwargs=kwargs, **task_options or {})

    # Fixed typo in the user-facing message: 'Unkown' -> 'Unknown'.
    raise ImproperlyConfigured('Unknown task queue configured: {0}'.format(task_queue))
62 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | =========
2 | Cacheback
3 | =========
4 |
5 | ----------------------------------------
6 | Asynchronous cache refreshing for Django
7 | ----------------------------------------
8 |
9 | What does this library do?
10 | --------------------------
11 |
12 | It's an extensible caching library that refreshes stale cache items
13 | asynchronously using a Celery_ or rq_ task (utilizing django-rq). The key
14 | idea being that it's better to serve a stale item (and populate the cache
15 | asynchronously) than block the response process in order to populate the cache
16 | synchronously.
17 |
18 | .. _Celery: http://celeryproject.org/
19 | .. _rq: http://python-rq.org/
20 |
21 | Using this library, you can rework your views so that all reads are from
22 | cache - which can be a significant performance boost.
23 |
24 | A corollary of this technique is that cache hammering can be handled simply and
25 | elegantly, avoiding sudden surges of expensive reads when a cached item becomes stale.
26 |
27 |
28 | Do you have good docs?
29 | ----------------------
30 |
31 | Yup - `over on readthedocs.org`_.
32 |
33 | .. _`over on readthedocs.org`: http://django-cacheback.readthedocs.org/en/latest/
34 |
35 |
36 | Supported versions
37 | ------------------
38 |
39 | Python 3.6+ is supported. Django 2.2+ is supported.
40 |
41 |
42 | Do you have tests?
43 | ------------------
44 |
45 | You betcha!
46 |
47 | .. image:: https://github.com/codeinthehole/django-cacheback/workflows/CI/badge.svg?branch=master
48 | :target: https://github.com/codeinthehole/django-cacheback/actions?workflow=CI
49 | :alt: CI Status
50 |
51 |
52 | Can I use this in my project?
53 | -----------------------------
54 |
55 | Probably - subject to the `MIT license`_.
56 |
57 | .. _`MIT license`: https://github.com/codeinthehole/django-cacheback/blob/master/LICENSE
58 |
59 |
60 | I want to contribute!
61 | ---------------------
62 |
63 | Brilliant! Here are the `contributing guidelines`_.
64 |
65 | .. _`contributing guidelines`: http://django-cacheback.readthedocs.org/en/latest/contributing.html
66 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Installation
3 | ============
4 |
5 | You need to do three things:
6 |
7 | Install django-cacheback
8 | ~~~~~~~~~~~~~~~~~~~~~~~~
9 |
10 | To install with Celery support, run::
11 |
12 | $ pip install django-cacheback[celery]
13 |
14 | If you want to install with RQ support, just use::
15 |
16 | $ pip install django-cacheback[rq]
17 |
18 | After installing the package and dependencies, add ``cacheback`` to your ``INSTALLED_APPS``.
19 | If you want to use RQ as your task queue, you need to set ``CACHEBACK_TASK_QUEUE``
20 | in your settings to ``rq``.
21 |
22 | Install a message broker
23 | ~~~~~~~~~~~~~~~~~~~~~~~~
24 |
25 | Celery requires a message broker. Use `Celery's tutorial`_ to help set one up.
26 | I recommend rabbitmq.
27 |
28 |
29 | For RQ you need to set up a redis-server and configure ``django-rq``. Please look
30 | up the `django-rq installation guide`_ for more details.
31 |
32 | .. _`Celery's tutorial`: http://docs.celeryproject.org/en/latest/getting-started/first-steps-with-celery.html
33 | .. _`django-rq installation guide`: https://github.com/ui/django-rq#installation
34 |
35 | Set up a cache
36 | ~~~~~~~~~~~~~~
37 |
38 | You also need to ensure you have `a cache set up`_. Most likely, you'll be using
39 | memcache so your settings will include something like::
40 |
41 | CACHES = {
42 | 'default': {
43 | 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
44 | 'LOCATION': '127.0.0.1:11211',
45 | }
46 | }
47 |
48 | .. _`a cache set up`: https://docs.djangoproject.com/en/dev/topics/cache/?from=olddocs
49 |
50 | Logging
51 | ~~~~~~~
52 |
53 | You may also want to configure logging handlers for the 'cacheback' named
54 | logger. To set up console logging, use something like::
55 |
56 | LOGGING = {
57 | 'version': 1,
58 | 'disable_existing_loggers': False,
59 | 'filters': {
60 | 'require_debug_false': {
61 | '()': 'django.utils.log.RequireDebugFalse'
62 | }
63 | },
64 | 'handlers': {
65 | 'console': {
66 | 'level': 'DEBUG',
67 | 'class': 'logging.StreamHandler',
68 | }
69 | },
70 | 'loggers': {
71 | 'cacheback': {
72 | 'handlers': ['console'],
73 | 'level': 'DEBUG',
74 | 'propagate': False,
75 | },
76 | }
77 | }
78 |
79 |
--------------------------------------------------------------------------------
/tests/test_decorators.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from cacheback.decorators import cacheback
4 | from cacheback.jobs import FunctionJob
5 |
6 |
class OtherFunctionJob(FunctionJob):
    """FunctionJob subclass used to verify the decorator's ``job_class`` argument."""

    pass
9 |
10 |
@cacheback(fetch_on_miss=False, job_class=OtherFunctionJob)
def no_fetch_miss_function(param):
    """Cached via a custom job class; cache misses return None instead of fetching."""
    return 'JOB-EXECUTED:{0}'.format(param)
14 |
15 |
@cacheback(lifetime=30, fetch_on_miss=True)
def fetch_miss_function(param):
    """Cached with a 30s lifetime; cache misses are computed synchronously."""
    return 'JOB-EXECUTED:{0}'.format(param)
19 |
20 |
@cacheback(cache_alias='secondary', fetch_on_miss=True)
def fetch_cache_alias_function(param):
    """Cached in the 'secondary' cache backend instead of the default one."""
    return 'JOB-EXECUTED:{0}'.format(param)
24 |
25 |
@cacheback(set_data_kwarg='my_data')
def custom_payload_label_function(param):
    """Accepts its manual ``job.set()`` payload under the keyword ``my_data``."""
    return 'JOB-EXECUTED:{0}'.format(param)
29 |
30 |
@pytest.mark.usefixtures('cleared_cache', scope='function')
class TestCachebackDecorator:
    """Integration tests for the ``cacheback`` decorator."""

    def test_job_init(self):
        # Decorator kwargs are forwarded to the FunctionJob constructor.
        assert isinstance(fetch_miss_function.job, FunctionJob)
        assert fetch_miss_function.job.lifetime == 30

        assert isinstance(fetch_cache_alias_function.job, FunctionJob)
        assert fetch_cache_alias_function.job.cache_alias == 'secondary'

    def test_job_init_job_class(self):
        # ``job_class`` swaps in a custom Job implementation.
        assert isinstance(no_fetch_miss_function.job, OtherFunctionJob)

    @pytest.mark.redis_required
    def test_miss_no_fetch(self, rq_worker):
        # With fetch_on_miss=False a miss returns None and enqueues a job.
        assert no_fetch_miss_function('foo') is None
        assert len(rq_worker.queues[0].jobs) == 1

    def test_miss_fetch(self):
        # With fetch_on_miss=True a miss is computed synchronously.
        assert fetch_miss_function('foo') == 'JOB-EXECUTED:foo'

    def test_cache_alias(self):
        assert fetch_cache_alias_function('foo') == 'JOB-EXECUTED:foo'

    def test_set(self):
        # Values written via job.set() are served on subsequent calls.
        no_fetch_miss_function.job.set(no_fetch_miss_function, 'foo', 'MANUALLY_SET')

        assert no_fetch_miss_function('foo') == 'MANUALLY_SET'

    def test_set_kwarg(self):
        # The payload may also be passed as the ``data`` keyword.
        no_fetch_miss_function.job.set(
            no_fetch_miss_function, 'foo', data='MANUALLY_SET_WITH_KWARG'
        )

        assert no_fetch_miss_function('foo') == 'MANUALLY_SET_WITH_KWARG'

    def test_set_custom_kwarg(self):
        # ``set_data_kwarg`` renames the payload keyword accepted by set().
        custom_payload_label_function.job.set(
            custom_payload_label_function, 'foo', my_data='MANUALLY_SET_WITH_CUSTOM_KWARG'
        )

        assert custom_payload_label_function('foo') == 'MANUALLY_SET_WITH_CUSTOM_KWARG'

    @pytest.mark.redis_required
    def test_hit(self, rq_burst):
        # Once the worker has processed the queued job, the cache is hot.
        assert no_fetch_miss_function('foo') is None
        rq_burst()
        assert no_fetch_miss_function('foo') == 'JOB-EXECUTED:foo'
78 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "django-cacheback"
3 | version = "3.0.0"
4 | description = "Caching library for Django that uses Celery or RQ to refresh cache items asynchronously"
5 | authors = [
6 | "David Winterbottom ",
7 | "Stephan Jaekel ",
8 | "Flávio Juvenal (@fjsj)",
9 | "Michael Kutý",
10 | ]
11 | license = "MIT"
12 | readme = "README.rst"
13 | homepage = 'https://github.com/codeinthehole/django-cacheback'
14 | repository = 'https://github.com/codeinthehole/django-cacheback'
keywords = ["django", "cache", "caching", "celery", "rq"]
16 | classifiers = [
17 | "Development Status :: 5 - Production/Stable",
18 | "License :: OSI Approved :: MIT License",
19 | "Environment :: Web Environment",
20 | "Framework :: Django",
21 | "Intended Audience :: Developers",
22 | "Operating System :: Unix",
    "Programming Language :: Python",
25 | "Programming Language :: Python :: 3.6",
26 | "Programming Language :: Python :: 3.7",
27 | "Programming Language :: Python :: 3.8",
28 | "Programming Language :: Python :: 3.9",
29 | "Programming Language :: Python :: 3.10",
30 | ]
31 | packages = [{ include = "cacheback" }]
32 | include = ["LICENSE"]
33 |
34 | [tool.poetry.dependencies]
35 | python = ">=3.6.2,<4"
36 | importlib-metadata = {version = "*", python = "<3.8"}
37 |
38 | django = ">=2"
39 | celery = {version = ">=4", optional = true}
40 | django-rq = {version = ">=2", optional = true}
41 | Sphinx = {version = ">=3.3.0,<4", optional = true}
42 |
43 | [tool.poetry.dev-dependencies]
44 | pytest = ">=6.0"
45 | pytest-django = ">=4.1"
46 | pytest-cov = ">=2.10"
47 | pytest-isort = ">=1.2"
48 | pytest-flake8 = ">=1.0"
49 | flake8 = "<5"
50 | pytest-black = {version = ">=0.3"}
51 | freezegun = ">=1.0"
52 | coverage = {version = ">=5.0", extras = ["toml"]}
53 | celery = ">=4"
54 | django-rq = ">=2"
55 | typing_extensions = { version = ">=3.10", python = "<3.10" }
56 |
57 | [tool.poetry.extras]
58 | celery = ["celery"]
59 | rq = ["django-rq"]
60 | docs = ["Sphinx"]
61 |
62 | [build-system]
63 | requires = ["poetry>=1.1"]
64 | build-backend = "poetry.masonry.api"
65 |
66 | [tool.pytest.ini_options]
67 | addopts = "-v --nomigrations"
68 | testpaths = ["cacheback", "tests"]
69 | markers = [
70 | "redis_required: Tests that require a running redis-server instance"
71 | ]
72 | flake8-max-line-length = 96
73 | flake8-ignore = ["E203", "E266", "E501", "W503"]
74 | flake8-max-complexity = 18
75 | DJANGO_SETTINGS_MODULE = "tests.settings"
76 |
77 | [tool.black]
78 | line-length = 96
79 | skip-string-normalization = true
80 | skip-numeric-underscore-normalization = true
81 | include = "\\.pyi?$"
82 | exclude = "/(\\.git|\\.tox|build|dist)/"
83 |
84 | [tool.coverage.run]
85 | branch = true
86 | source = ["cacheback"]
87 |
88 | [tool.coverage.report]
89 | exclude_lines = ["raise NotImplementedError"]
90 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 | import pytest
4 | from django.core.exceptions import ImproperlyConfigured
5 |
6 | from cacheback.utils import enqueue_task, get_job_class
7 |
8 |
class DummyClass:
    """Placeholder class resolved by the ``get_job_class`` happy-path test."""

    pass
11 |
12 |
class TestGetJobClass:
    """Tests for ``cacheback.utils.get_job_class``."""

    @mock.patch('cacheback.utils.logger')
    def test_invalid_module(self, logger_mock):
        # Unimportable module: returns None and logs an import error.
        assert get_job_class('tests.foo.DummyClass') is None
        assert 'Error importing job module' in logger_mock.error.call_args[0][0]
        assert logger_mock.error.call_args[0][1] == 'tests.foo'

    @mock.patch('cacheback.utils.logger')
    def test_invalid_class(self, logger_mock):
        # Missing attribute: returns None and logs module and class names.
        assert get_job_class('tests.test_utils.OtherDummyClass') is None
        assert 'define a \'%s\' class' in logger_mock.error.call_args[0][0]
        assert logger_mock.error.call_args[0][1] == 'tests.test_utils'
        assert logger_mock.error.call_args[0][2] == 'OtherDummyClass'

    def test_class(self):
        # Happy path: the dotted path resolves to the class object itself.
        assert get_job_class('tests.test_utils.DummyClass') == DummyClass
29 |
30 |
class TestEnqueueTask:
    """Tests for ``cacheback.utils.enqueue_task`` backend dispatch."""

    @mock.patch('cacheback.utils.rq_refresh_cache')
    @mock.patch('cacheback.utils.celery_refresh_cache')
    def test_celery(self, celery_mock, rq_mock, settings):
        # Celery backend: task_options are forwarded to apply_async.
        settings.CACHEBACK_TASK_QUEUE = 'celery'
        enqueue_task({'bar': 'baz'}, task_options={'foo': 'bar'})
        assert celery_mock.apply_async.called is True
        assert celery_mock.apply_async.call_args[1] == {'kwargs': {'bar': 'baz'}, 'foo': 'bar'}
        assert rq_mock.delay.called is False

    @mock.patch('django_rq.get_queue')
    @mock.patch('cacheback.utils.celery_refresh_cache')
    def test_rq(self, celery_mock, rq_mock, settings):
        # RQ backend: task_options select the queue; result kept by default
        # (result_ttl=None).
        settings.CACHEBACK_TASK_QUEUE = 'rq'
        enqueue_task({'bar': 'baz'}, task_options={'foo': 'bar'})
        assert celery_mock.apply_async.called is False
        assert rq_mock.called is True
        assert rq_mock.call_args[1] == {'foo': 'bar'}
        assert rq_mock.return_value.enqueue_call.called is True
        assert rq_mock.return_value.enqueue_call.call_args[1] == {
            'kwargs': {'bar': 'baz'},
            'result_ttl': None,
        }

    @mock.patch('django_rq.get_queue')
    @mock.patch('cacheback.utils.celery_refresh_cache')
    def test_rq_dont_store_result(self, celery_mock, rq_mock, settings):
        # CACHEBACK_TASK_IGNORE_RESULT discards the task result (result_ttl=0).
        settings.CACHEBACK_TASK_QUEUE = 'rq'
        settings.CACHEBACK_TASK_IGNORE_RESULT = True
        enqueue_task({'bar': 'baz'}, task_options={'foo': 'bar'})
        assert celery_mock.apply_async.called is False
        assert rq_mock.called is True
        assert rq_mock.call_args[1] == {'foo': 'bar'}
        assert rq_mock.return_value.enqueue_call.called is True
        assert rq_mock.return_value.enqueue_call.call_args[1] == {
            'kwargs': {'bar': 'baz'},
            'result_ttl': 0,
        }

    def test_unknown(self, settings):
        # Renamed from the misspelled ``test_unkown``. The asserted message
        # spelling ('Unkown') deliberately matches the string raised in
        # cacheback.utils.enqueue_task.
        settings.CACHEBACK_TASK_QUEUE = 'unknown'
        with pytest.raises(ImproperlyConfigured) as exc:
            enqueue_task('foo')

        assert 'Unkown task queue' in str(exc.value)
76 |
--------------------------------------------------------------------------------
/cacheback/jobs.py:
--------------------------------------------------------------------------------
# ``django.utils.importlib`` was removed in Django 1.9 and this package
# requires Django >= 2, so the old Python 2 fallback import is dead code:
# the stdlib module is always available.
import importlib
5 |
6 | from .base import Job
7 |
8 |
class FunctionJob(Job):
    """
    Job that runs a plain function and caches whatever it returns.

    Constructor arguments override the class-level defaults inherited from
    ``Job`` only when they are supplied (i.e. not ``None``).
    """

    def __init__(
        self,
        lifetime=None,
        fetch_on_miss=None,
        cache_alias=None,
        task_options=None,
        set_data_kwarg=None,
    ):
        super().__init__()
        # lifetime is coerced to int; the other overrides are stored as-is.
        if lifetime is not None:
            self.lifetime = int(lifetime)
        for attr, value in (
            ('fetch_on_miss', fetch_on_miss),
            ('cache_alias', cache_alias),
            ('task_options', task_options),
            ('set_data_kwarg', set_data_kwarg),
        ):
            if value is not None:
                setattr(self, attr, value)

    def get_init_kwargs(self):
        """
        Return the kwargs needed to reconstruct this job in the worker.

        ``fetch_on_miss`` is deliberately omitted because the refresh
        method never consults it.
        """
        return {'lifetime': self.lifetime, 'cache_alias': self.cache_alias}

    def prepare_args(self, fn, *args):
        """Serialise ``fn`` to a "module:name" string so it can be pickled."""
        fn_path = "%s:%s" % (fn.__module__, fn.__name__)
        return (fn_path,) + args

    def fetch(self, fn_string, *args, **kwargs):
        """Re-import the function named by ``fn_string`` and invoke it."""
        module_path, fn_name = fn_string.split(":")
        target = getattr(importlib.import_module(module_path), fn_name)
        # The cacheback decorator exposes the wrapped function via a ``fn``
        # attribute; unwrap it so the undecorated original is executed.
        return getattr(target, 'fn', target)(*args, **kwargs)
57 |
58 |
class QuerySetJob(Job):
    """
    Base job for caching the results of ORM reads against one model.
    """

    def __init__(
        self, model, lifetime=None, fetch_on_miss=None, cache_alias=None, task_options=None
    ):
        """
        :model: The model class to use
        """
        super().__init__()
        self.model = model
        # Overrides are optional; unset ones keep the Job class defaults.
        for attr, value in (
            ('lifetime', lifetime),
            ('fetch_on_miss', fetch_on_miss),
            ('cache_alias', cache_alias),
            ('task_options', task_options),
        ):
            if value is not None:
                setattr(self, attr, value)

    def get_init_kwargs(self):
        """Return the kwargs needed to reconstruct this job in the worker."""
        return {'model': self.model, 'lifetime': self.lifetime, 'cache_alias': self.cache_alias}

    def key(self, *args, **kwargs):
        """Prefix the generic cache key with the model name."""
        return "%s-%s" % (self.model.__name__, super().key(*args, **kwargs))
86 |
87 |
class QuerySetGetJob(QuerySetJob):
    """
    For ORM reads that use the ``get`` method.
    """

    def fetch(self, *args, **kwargs):
        # Positional args are accepted but ignored; only kwargs feed the lookup.
        return self.model.objects.get(**kwargs)
95 |
96 |
class QuerySetFilterJob(QuerySetJob):
    """
    For ORM reads that use the ``filter`` method.
    """

    def fetch(self, *args, **kwargs):
        # Positional args are accepted but ignored; only kwargs feed the filter.
        return self.model.objects.filter(**kwargs)
104 |
--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | =========
2 | Changelog
3 | =========
4 |
5 | 3.0.0
6 | ~~~~~
7 |
8 | * BREAKING CHANGE: Drop support for Django < 2 and Python < 3.6.
9 |
10 | 2.1.0
11 | ~~~~~
12 |
13 | * Add ``CACHEBACK_TASK_IGNORE_RESULT`` option to reduce noise in task queues.
14 |
15 | 2.0.0
16 | ~~~~~
17 |
18 | * BREAKING CHANGE: Dropping support for Python 2
19 |
20 | 1.4.0
21 | ~~~~~
22 |
23 | * Add support for Django 2.0.
* Drop official support for Python 3.3 (py33 might work for a bit longer).
25 |
26 | 1.3.2
27 | ~~~~~
28 |
29 | * Bugfix in set method — was using the timeout instead of the expiry. Also added a
30 | raw_get method, which retrieves cache entry with same semantics as regular
31 | get method.
32 |
33 | 1.3.1
34 | ~~~~~
35 |
36 | * Add support for Django 1.11.
37 |
38 | 1.3
39 | ~~~
40 | * Add set method, with the same semantics as delete & get. Updated docs.
41 |
42 | 1.2
43 | ~~~
44 |
45 | * Add support for Django 1.10 (and drop support for Django < 1.8)
46 | * Refactored codebase, cleaned up method naming and module structure. Old imports
47 | and methods will work at least for this release. RemovedInCacheback13Warning is
48 | set if old methods or imports are used.
49 | * Add option to have a different cache per cacheback job
50 |
51 | 1.1
52 | ~~~
53 |
54 | * Added support for multiple background workers (currently Celery and rq)
55 | * Add pytest support
56 |
57 | 1.0
58 | ~~~
59 | * Support Django versions >= 1.7
60 | * Update sandbox to work with Django 1.9
61 |
62 | 0.9.1
63 | ~~~~~
* Fix silly ``NameError`` introduced in 0.9 (`#39`_)
65 |
66 | .. _`#39`: https://github.com/codeinthehole/django-cacheback/pull/39
67 |
68 | 0.9
69 | ~~~
70 | * Add support for other caches (`#32`_)
* Fix inconsistent hashing issue in Python 3.x (`#28`_)
72 | * Allow ``job_class_kwargs`` to be passed to ``cacheback`` decorator (`#31`_)
73 |
74 | .. _`#32`: https://github.com/codeinthehole/django-cacheback/pull/32
75 | .. _`#28`: https://github.com/codeinthehole/django-cacheback/pull/28
76 | .. _`#31`: https://github.com/codeinthehole/django-cacheback/pull/31
77 |
78 | 0.8
79 | ~~~
80 | * Add support for Python 3 (`#24`_)
81 |
82 | .. _`#24`: https://github.com/codeinthehole/django-cacheback/pull/24
83 |
84 | 0.7
85 | ~~~
* Include the class name and module path in the cache key by default (`#21`_)
87 |
88 | .. _`#21`: https://github.com/codeinthehole/django-cacheback/pull/21
89 |
90 | 0.6
91 | ~~~
92 | * Celery task arguments can now be passed (`#20`_).
93 | * Include reference to job instance on decorator function (`#17`_). This allows
94 | caches to be invalidated using the decorator function instance.
95 |
96 | .. _`#17`: https://github.com/codeinthehole/django-cacheback/pull/17
97 | .. _`#20`: https://github.com/codeinthehole/django-cacheback/pull/20
98 |
99 | 0.5
100 | ~~~
101 | * Added hook for performing a synchronous refresh of stale items
102 | * Updated docs for invalidation
103 |
104 | 0.4
105 | ~~~
106 | * Handle some error cases
107 | * Add invalidate method
108 |
109 | 0.3
110 | ~~~
* Fixed nasty bug where caching could find its way into a limbo state (`#5`_)
* Remove bug where it was assumed that cached items would be iterable (`#4`_)
113 | * Added handling of uncacheable types
114 |
115 | .. _`#5`: https://github.com/codeinthehole/django-cacheback/pull/5
116 | .. _`#4`: https://github.com/codeinthehole/django-cacheback/pull/4
117 |
118 | 0.2
119 | ~~~
120 | * Docs? Docs!
121 | * Added method for determining whether to "fetch on miss"
122 |
123 | 0.1
124 | ~~~
125 | Minimal viable product
126 |
--------------------------------------------------------------------------------
/tests/test_jobs.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from cacheback.decorators import cacheback
4 | from cacheback.jobs import FunctionJob, QuerySetFilterJob, QuerySetGetJob, QuerySetJob
5 | from tests.dummyapp.models import DummyModel
6 |
7 |
def dummy_function(param):
    """Return a marker string recording that the job ran with *param*."""
    marker = 'JOB-EXECUTED:{0}'
    return marker.format(param)
10 |
11 |
@cacheback()
def decorated_dummy_function(param):
    """Decorated variant used to verify FunctionJob.fetch unwraps ``.fn``."""
    return 'JOB-EXECUTED:{0}'.format(param)
15 |
16 |
@pytest.mark.usefixtures('cleared_cache', scope='function')
class TestFunctionJob:
    """Tests for FunctionJob construction, serialisation and fetching."""

    def test_init_defaults(self):
        # Without arguments the Job class-level defaults apply.
        job = FunctionJob()
        assert job.lifetime == 600
        assert job.fetch_on_miss
        assert job.cache_alias == 'default'
        assert job.task_options == {}

    def test_init(self):
        # Every constructor argument overrides its corresponding default.
        job = FunctionJob(
            lifetime=30,
            fetch_on_miss=False,
            cache_alias='secondary',
            task_options={'foo': 'bar'},
        )
        assert job.lifetime == 30
        assert not job.fetch_on_miss
        assert job.cache_alias == 'secondary'
        assert job.task_options == {'foo': 'bar'}

    def test_prepare_args(self):
        # Functions are serialised to an importable "module:name" string.
        job = FunctionJob()
        assert job.prepare_args(dummy_function, 'foo') == (
            'tests.test_jobs:dummy_function',
            'foo',
        )

    def test_fetch(self):
        assert FunctionJob().fetch('tests.test_jobs:dummy_function', 'foo') == (
            'JOB-EXECUTED:foo'
        )

    def test_fetch_decorated(self):
        # Decorated functions are unwrapped via their ``fn`` attribute.
        assert FunctionJob().fetch('tests.test_jobs:decorated_dummy_function', 'foo') == (
            'JOB-EXECUTED:foo'
        )

    def test_init_kwargs(self):
        # Only lifetime and cache_alias are round-tripped to the worker.
        assert FunctionJob().get_init_kwargs() == {'lifetime': 600, 'cache_alias': 'default'}
        assert FunctionJob(lifetime=30).get_init_kwargs() == {
            'lifetime': 30,
            'cache_alias': 'default',
        }
        assert FunctionJob(cache_alias='secondary').get_init_kwargs() == {
            'lifetime': 600,
            'cache_alias': 'secondary',
        }
65 |
66 |
@pytest.mark.django_db
class TestQuerySetJob:
    """Tests for the QuerySetJob base class."""

    def test_init_defaults(self):
        # Without overrides the Job class-level defaults apply.
        job = QuerySetJob(DummyModel)
        assert job.lifetime == 600
        assert job.fetch_on_miss
        assert job.cache_alias == 'default'
        assert job.task_options == {}

    def test_init(self):
        # Every optional constructor argument overrides its default.
        job = QuerySetJob(
            DummyModel,
            lifetime=30,
            fetch_on_miss=False,
            cache_alias='secondary',
            task_options={'foo': 'bar'},
        )
        assert job.lifetime == 30
        assert not job.fetch_on_miss
        assert job.cache_alias == 'secondary'
        assert job.task_options == {'foo': 'bar'}

    def test_key(self):
        # Cache keys are prefixed with the model name to avoid collisions.
        assert QuerySetJob(DummyModel).key('foo') == (
            'DummyModel-cacheback.jobs.QuerySetJob:acbd18db4cc2f85cedef654fccc4a4d8'
        )

    def test_init_kwargs(self):
        # model, lifetime and cache_alias are round-tripped to the worker.
        assert QuerySetJob(DummyModel).get_init_kwargs() == {
            'model': DummyModel,
            'lifetime': 600,
            'cache_alias': 'default',
        }
        assert QuerySetJob(DummyModel, lifetime=30).get_init_kwargs() == {
            'model': DummyModel,
            'lifetime': 30,
            'cache_alias': 'default',
        }
        assert QuerySetJob(DummyModel, cache_alias='secondary').get_init_kwargs() == {
            'model': DummyModel,
            'lifetime': 600,
            'cache_alias': 'secondary',
        }
110 |
111 |
@pytest.mark.django_db
class TestQuerySetGetJob:
    """Tests for QuerySetGetJob.fetch delegating to ``objects.get``."""

    def test_fetch(self):
        dummy1 = DummyModel.objects.create(name='Foo')
        assert QuerySetGetJob(DummyModel).fetch(name='Foo') == dummy1
117 |
118 |
@pytest.mark.django_db
class TestQuerySetFilterJob:
    """Tests for QuerySetFilterJob.fetch delegating to ``objects.filter``.

    Renamed from ``TestQuerySetGetFilterJob``: the stray "Get" was a
    copy-paste leftover from the class testing QuerySetGetJob.
    """

    def test_fetch(self):
        dummy1 = DummyModel.objects.create(name='Foo')
        dummy2 = DummyModel.objects.create(name='Bar')
        dummy3 = DummyModel.objects.create(name='Foobar')

        # Only the objects whose name starts with "Foo" are returned.
        qset = list(QuerySetFilterJob(DummyModel).fetch(name__startswith='Foo'))
        assert dummy1 in qset
        assert dummy2 not in qset
        assert dummy3 in qset
130 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
16 |
17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
18 |
19 | help:
	@echo "Please use \`make <target>' where <target> is one of"
21 | @echo " html to make standalone HTML files"
22 | @echo " dirhtml to make HTML files named index.html in directories"
23 | @echo " singlehtml to make a single large HTML file"
24 | @echo " pickle to make pickle files"
25 | @echo " json to make JSON files"
26 | @echo " htmlhelp to make HTML files and a HTML help project"
27 | @echo " qthelp to make HTML files and a qthelp project"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
31 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
32 | @echo " text to make text files"
33 | @echo " man to make manual pages"
34 | @echo " texinfo to make Texinfo files"
35 | @echo " info to make Texinfo files and run them through makeinfo"
36 | @echo " gettext to make PO message catalogs"
37 | @echo " changes to make an overview of all changed/added/deprecated items"
38 | @echo " linkcheck to check all external links for integrity"
39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
40 |
41 | clean:
42 | -rm -rf $(BUILDDIR)/*
43 |
44 | html:
45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
46 | @echo
47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
48 |
49 | dirhtml:
50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
51 | @echo
52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
53 |
54 | singlehtml:
55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
56 | @echo
57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
58 |
59 | pickle:
60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
61 | @echo
62 | @echo "Build finished; now you can process the pickle files."
63 |
64 | json:
65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
66 | @echo
67 | @echo "Build finished; now you can process the JSON files."
68 |
69 | htmlhelp:
70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
71 | @echo
72 | @echo "Build finished; now you can run HTML Help Workshop with the" \
73 | ".hhp project file in $(BUILDDIR)/htmlhelp."
74 |
75 | qthelp:
76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
77 | @echo
78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-async-cache.qhcp"
81 | @echo "To view the help file:"
82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-async-cache.qhc"
83 |
84 | devhelp:
85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
86 | @echo
87 | @echo "Build finished."
88 | @echo "To view the help file:"
89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/django-async-cache"
90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-async-cache"
91 | @echo "# devhelp"
92 |
93 | epub:
94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
95 | @echo
96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
97 |
98 | latex:
99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
100 | @echo
101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
103 | "(use \`make latexpdf' here to do that automatically)."
104 |
105 | latexpdf:
106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
107 | @echo "Running LaTeX files through pdflatex..."
108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
110 |
111 | text:
112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
113 | @echo
114 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
115 |
116 | man:
117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
118 | @echo
119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
120 |
121 | texinfo:
122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
123 | @echo
124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
125 | @echo "Run \`make' in that directory to run these through makeinfo" \
126 | "(use \`make info' here to do that automatically)."
127 |
128 | info:
129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
130 | @echo "Running Texinfo files through makeinfo..."
131 | make -C $(BUILDDIR)/texinfo info
132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
133 |
134 | gettext:
135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
136 | @echo
137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
138 |
139 | changes:
140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
141 | @echo
142 | @echo "The overview file is in $(BUILDDIR)/changes."
143 |
144 | linkcheck:
145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
146 | @echo
147 | @echo "Link check complete; look for any errors in the above output " \
148 | "or in $(BUILDDIR)/linkcheck/output.txt."
149 |
150 | doctest:
151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
152 | @echo "Testing of doctests in the sources finished, look at the " \
153 | "results in $(BUILDDIR)/doctest/output.txt."
154 |
--------------------------------------------------------------------------------
/sandbox/settings.py:
--------------------------------------------------------------------------------
import os


DEBUG = True

DATABASES = {
    'default': {
        # Engine options: 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'ENGINE': 'django.db.backends.sqlite3',
        # Or path to database file if using sqlite3.
        'NAME': 'db.sqlite3',
        'USER': '',      # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',      # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
STATIC_ROOT = ''

# URL prefix for static files.
STATIC_URL = '/static/'

# Additional locations of static files (absolute paths, forward slashes).
STATICFILES_DIRS = ()

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)

# Make this unique, and don't share it with anybody.
SECRET_KEY = 'a5my98-t4si@aoegk1tm4!3w3&vmsehkpez+5xp@b0kvk42t#b'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'OPTIONS': {
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

MIDDLEWARE = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'wsgi.application'

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'sandbox',
    'dummyapp',
    'django_rq',
    'cacheback',
)

# 10.0.2.2 is presumably the Vagrant/VirtualBox NAT host address (the repo
# ships a Vagrantfile), so debug tooling works from the host machine.
INTERNAL_IPS = ('10.0.2.2',)

# Logging: email the site admins on every HTTP 500 when DEBUG=False, and
# stream cacheback's own debug output to the console.
# See http://docs.djangoproject.com/en/dev/topics/logging for details.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'cacheback': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
    }
}

# CACHEBACK SETTINGS

CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
CELERY_TASK_SERIALIZER = 'json'

RQ_QUEUES = {
    'default': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 0,
    },
}

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': '127.0.0.1:11211',
    }
}

# Pick the async task backend from the QUEUE environment variable.
# Anything other than the two supported values falls back to celery.
_queue = os.environ.get('QUEUE', '')
CACHEBACK_TASK_QUEUE = _queue if _queue in ('celery', 'rq') else 'celery'
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Sample usage
3 | ============
4 |
5 | As a decorator
6 | ~~~~~~~~~~~~~~
7 |
8 | Simply wrap the function whose results you want to cache::
9 |
10 | import requests
11 | from cacheback.decorators import cacheback
12 |
13 | @cacheback()
14 | def fetch_tweets(username):
15 | url = "https://twitter.com/statuses/user_timeline.json?screen_name=%s"
16 |         return requests.get(url % username).json()
17 |
18 | The default behaviour of the ``cacheback`` decorator is to:
19 |
20 | * Cache items for 10 minutes.
21 |
22 | * When the cache is empty for a given key, the data will be fetched
23 | synchronously.
24 |
25 | You can parameterise the decorator to cache items for longer and also to not block on a
26 | cache miss::
27 |
28 | import requests
29 | from cacheback.decorators import cacheback
30 |
31 | @cacheback(lifetime=1200, fetch_on_miss=False)
32 | def fetch_tweets(username):
33 | url = "https://twitter.com/statuses/user_timeline.json?screen_name=%s"
34 |         return requests.get(url % username).json()
35 |
36 | Now:
37 |
38 | * Items will be cached for 20 minutes;
39 |
40 | * For a cache miss, ``None`` will be returned and the cache refreshed
41 | asynchronously.
42 |
43 | As an instance of ``cacheback.Job``
44 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
45 |
46 | Subclassing ``cacheback.Job`` gives you complete control over the caching
47 | behaviour. The only method that must be overridden is ``fetch`` which is
48 | responsible for fetching the data to be cached::
49 |
50 | import requests
51 | from cacheback.base import Job
52 |
53 | class UserTweets(Job):
54 |
55 | def fetch(self, username):
56 | url = "https://twitter.com/statuses/user_timeline.json?screen_name=%s"
57 |             return requests.get(url % username).json()
58 |
59 | Client code only needs to be aware of the ``get`` method which returns the
60 | cached data. For example::
61 |
62 | from django.shortcuts import render
63 |
64 | def tweets(request, username):
65 | return render(request,
66 | 'tweets.html',
67 | {'tweets': UserTweets().get(username)})
68 |
69 | You can control the lifetime and behaviour on cache miss using either class
70 | attributes::
71 |
72 | import requests
73 | from cacheback.base import Job
74 |
75 | class UserTweets(Job):
76 | lifetime = 60*20
77 | fetch_on_miss = False
78 |
79 | def fetch(self, username):
80 | url = "https://twitter.com/statuses/user_timeline.json?screen_name=%s"
81 |             return requests.get(url % username).json()
82 |
83 | or by overriding methods::
84 |
85 | import time
86 | import requests
87 | from cacheback.base import Job
88 |
89 | class UserTweets(Job):
90 |
91 | def fetch(self, username):
92 | url = "https://twitter.com/statuses/user_timeline.json?screen_name=%s"
93 |             return requests.get(url % username).json()
94 |
95 | def expiry(self, username):
96 | now = time.time()
97 |             if username.startswith('a'):
98 | return now + 60*20
99 | return now + 60*10
100 |
101 | def should_item_be_fetched_synchronously(self, username):
102 |             return username.startswith('a')
103 |
104 | In the above toy example, the cache behaviour will be different for usernames
105 | starting with 'a'.
106 |
107 | Invalidation
108 | ~~~~~~~~~~~~
109 |
110 | If you want to programmatically invalidate a cached item, use the ``invalidate``
111 | method on a job instance::
112 |
113 | job = UserTweets()
114 | job.invalidate(username)
115 |
116 | This will trigger a new asynchronous refresh of the item.
117 |
118 | You can also simply remove an item from the cache so that the next request will
119 | trigger the refresh::
120 |
121 | job.delete(username)
122 |
123 | Setting cache values
124 | ~~~~~~~~~~~~~~~~~~~~
125 |
126 | If you want to update the cache programmatically use the ``set`` method on
127 | a job instance (this can be useful when your program can discover updates through a
128 | separate mechanism for example, or for caching partial or derived data)::
129 |
130 | tweets_job = UserTweets()
131 |
132 | user_tweets = tweets_job.get(username)
133 |
134 | new_tweet = PostTweet(username, 'Trying out Cacheback!')
135 |
136 | # Naive example, assuming no other process would have updated the tweets
137 | tweets_job.set(username, user_tweets + [new_tweet])
138 |
139 | The data to be cached can be specified in a few ways. Firstly it can be the last
140 | positional argument, as above. If that is unclear, you can also use the keyword ``data``::
141 |
142 |     tweets_job.set(username, data=(user_tweets + [new_tweet]))
143 |
144 | And if your cache method already uses a keyword argument called ``data`` you can specify
145 | the name of a different parameter as a class variable called ``set_data_kwarg``::
146 |
147 | class CustomKwUserTweets(UserTweets):
148 | set_data_kwarg = 'my_cache_data'
149 |
150 | custom_tweets_job = CustomKwUserTweets()
151 |
152 | custom_tweets_job.set(username, my_cache_data=(user_tweets + [new_tweet]))
153 |
154 | This also works with a decorated function::
155 |
156 | @cacheback()
157 | def fetch_tweets(username):
158 | url = "https://twitter.com/statuses/user_timeline.json?screen_name=%s"
159 |         return requests.get(url % username).json()
160 |
161 | user_tweets = fetch_tweets(username)
162 |
163 | new_tweet = PostTweet(username, 'Trying out Cacheback!')
164 |
165 |     fetch_tweets.job.set(fetch_tweets, username, (user_tweets + [new_tweet]))
166 |
167 | or::
168 |
169 |     fetch_tweets.job.set(fetch_tweets, username, data=(user_tweets + [new_tweet]))
170 |
171 | And you can specify the ``set_data_kwarg`` in the decorator params as you'd expect::
172 |
173 | @cacheback(set_data_kwarg='my_cache_data')
174 | def fetch_tweets(username):
175 | url = "https://twitter.com/statuses/user_timeline.json?screen_name=%s"
176 |         return requests.get(url % username).json()
177 |
178 |     fetch_tweets.job.set(fetch_tweets, username, my_cache_data=(user_tweets + [new_tweet]))
179 |
180 | **NOTE:** If your ``fetch`` method, or cacheback-decorated function takes a named parameter
181 | of ``data`` and you wish to use the ``set`` method, you **must** provide a new value for the
182 | ``set_data_kwarg`` parameter, and not pass in the data to cache as the last positional argument.
183 | Otherwise the value of the ``data`` parameter will be used as the data to cache.
184 |
185 |
186 | Checking what's in the cache
187 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
188 | On occasion you may wish to check exactly what Cacheback has stored in the cache without triggering a refresh — this is usually useful for seeing if values have updated since the last time they were retrieved. The ``raw_get`` method allows you to do that, and uses the same semantics as ``get``, ``set``, etc. It returns the value that's actually stored in the cache, i.e., the ``(expiry, data)`` tuple, or ``None`` if no value has yet been set::
189 |
190 |
191 | # Don't want to trigger a refetch at this point
192 | raw_cache_value = fetch_tweets.job.raw_get(fetch_tweets, username)
193 |
194 | if raw_cache_value is not None:
195 | expiry, cached_tweets = raw_cache_value
196 |
197 |
198 | Post-processing
199 | ~~~~~~~~~~~~~~~
200 |
201 | The ``cacheback.Job`` instance provides a `process_result` method that can be
202 | overridden to modify the result value being returned. You can use this to append
203 | information about whether the result is being returned from cache or not.
204 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | ================
2 | Django Cacheback
3 | ================
4 |
5 | Cacheback is an extensible caching library that refreshes stale cache items
6 | asynchronously using a Celery_ or rq_ task (utilizing django-rq). The key
7 | idea being that it's better to serve a stale item (and populate the cache
8 | asynchronously) than block the response process in order to populate the cache
9 | synchronously.
10 |
11 | .. _Celery: http://celeryproject.org/
12 | .. _rq: http://python-rq.org/
13 |
14 | Using this library, you can rework your views so that all reads are from
15 | cache - which can be a significant performance boost.
16 |
17 | A corollary of this technique is that cache stampedes can be easily avoided,
18 | avoiding sudden surges of expensive reads when cached items become stale.
19 |
20 | Cacheback provides a decorator for simple usage, a subclassable base
21 | class for more fine-grained control and helper classes for working with
22 | querysets.
23 |
24 | Example
25 | =======
26 |
27 | Consider a view for showing a user's tweets:
28 |
29 | .. sourcecode:: python
30 |
31 | from django.shortcuts import render
32 | from myproject.twitter import fetch_tweets
33 |
34 | def show_tweets(request, username):
35 | return render(
36 | request,
37 | 'tweets.html',
38 | {'tweets': fetch_tweets(username)}
39 | )
40 |
41 | This works fine but the ``fetch_tweets`` function involves a HTTP round-trip and
42 | is slow.
43 |
44 | Performance can be improved by using Django's `low-level cache API`_:
45 |
46 | .. _`low-level cache API`: https://docs.djangoproject.com/en/dev/topics/cache/?from=olddocs#the-low-level-cache-api
47 |
48 | .. sourcecode:: python
49 |
50 | from django.shortcuts import render
51 |     from django.core.cache import cache
52 | from myproject.twitter import fetch_tweets
53 |
54 | def show_tweets(request, username):
55 | return render(
56 | request,
57 | 'tweets.html',
58 | {'tweets': fetch_cached_tweets(username)}
59 | )
60 |
61 | def fetch_cached_tweets(username):
62 | tweets = cache.get(username)
63 | if tweets is None:
64 | tweets = fetch_tweets(username)
65 | cache.set(username, tweets, 60*15)
66 | return tweets
67 |
68 | Now tweets are cached for 15 minutes after they are first fetched, using the
69 | twitter username as a key. This is obviously a performance improvement but the
70 | shortcomings of this approach are:
71 |
72 | * For a cache miss, the tweets are fetched synchronously, blocking code execution
73 | and leading to a slow response time.
74 |
75 | * This in turn exposes the view to a '`cache stampede`_' where
76 | multiple expensive reads run simultaneously when the cached item expires.
77 | Under heavy load, this can bring your site down and make you sad.
78 |
79 | .. _`cache stampede`: http://en.wikipedia.org/wiki/Cache_stampede
80 |
81 | Now, consider an alternative implementation that uses a Celery task to repopulate the
82 | cache asynchronously instead of during the request/response cycle:
83 |
84 | .. sourcecode:: python
85 |
86 | import datetime
87 | from django.shortcuts import render
88 |     from django.core.cache import cache
89 | from myproject.tasks import update_tweets
90 |
91 | def show_tweets(request, username):
92 | return render(
93 | request,
94 | 'tweets.html',
95 | {'tweets': fetch_cached_tweets(username)}
96 | )
97 |
98 | def fetch_cached_tweets(username):
99 | item = cache.get(username)
100 | if item is None:
101 | # Scenario 1: Cache miss - return empty result set and trigger a refresh
102 | update_tweets.delay(username, 60*15)
103 | return []
104 | tweets, expiry = item
105 |         if expiry < datetime.datetime.now():
106 | # Scenario 2: Cached item is stale - return it but trigger a refresh
107 | update_tweets.delay(username, 60*15)
108 | return tweets
109 |
110 | where the ``myproject.tasks.update_tweets`` task is implemented as:
111 |
112 | .. sourcecode:: python
113 |
114 | import datetime
115 | from celery import task
116 |     from django.core.cache import cache
117 | from myproject.twitter import fetch_tweets
118 |
119 | @task()
120 | def update_tweets(username, ttl):
121 | tweets = fetch_tweets(username)
122 | now = datetime.datetime.now()
123 |         cache.set(username, (tweets, now + datetime.timedelta(seconds=ttl)), 2592000)
124 |
125 | Some things to note:
126 |
127 | * Items are stored in the cache as tuples ``(data, expiry_timestamp)`` using
128 | Memcache's maximum expiry setting (2592000 seconds). By using this value, we
129 | are effectively bypassing memcache's replacement policy in favour of our own.
130 |
131 | * As the comments indicate, there are two scenarios to consider:
132 |
133 | 1. Cache miss. In this case, we don't have any data (stale or otherwise) to
134 | return. In the example above, we trigger an asynchronous refresh and
135 | return an empty result set. In other scenarios, it may make sense to
136 | perform a synchronous refresh.
137 |
138 | 2. Cache hit but with stale data. Here we return the stale data but trigger
139 | a Celery task to refresh the cached item.
140 |
141 | This pattern of re-populating the cache asynchronously works well. Indeed, it
142 | is the basis for the cacheback library.
143 |
144 | Here's the same functionality implemented using a django-cacheback decorator:
145 |
146 | .. sourcecode:: python
147 |
148 | from django.shortcuts import render
149 |     from django.core.cache import cache
150 | from myproject.twitter import fetch_tweets
151 | from cacheback.decorators import cacheback
152 |
153 | def show_tweets(request, username):
154 | return render(
155 | request,
156 | 'tweets.html',
157 | {'tweets': cacheback(60*15, fetch_on_miss=False)(fetch_tweets)(username)}
158 | )
159 |
160 | Here the decorator simply wraps the ``fetch_tweets`` function - nothing else is
161 | needed. Cacheback ships with a flexible Celery task that can run any function
162 | asynchronously.
163 |
164 | To be clear, the behaviour of this implementation is as follows:
165 |
166 | * The first request for a particular user's tweets will be a cache miss. The
167 | default behaviour of Cacheback is to fetch the data synchronously in this
168 | situation, but by passing ``fetch_on_miss=False``, we indicate that it's ok
169 | to return ``None`` in this situation and to trigger an asynchronous refresh.
170 |
171 | * A Celery worker will pick up the job to refresh the cache for this user's
172 |   tweets. It will import the ``fetch_tweets`` function and execute it with the
173 | correct username. The resulting data will be added to the cache with a
174 | lifetime of 15 minutes.
175 |
176 | * Any requests for this user's tweets during the period that Celery is
177 | refreshing the cache will also return ``None``. However Cacheback is aware of
178 | cache stampedes and does not trigger any additional jobs for refreshing the
179 | cached item.
180 |
181 | * Once the cached item is refreshed, any subsequent requests within the next 15
182 | minutes will be served from cache.
183 |
184 | * The first request after 15 minutes has elapsed will serve the (now-stale)
185 | cache result but will trigger a Celery task to fetch the user's tweets and
186 | repopulate the cache.
187 |
188 | Much of this behaviour can be configured by using a subclass of
189 | ``cacheback.Job``. The decorator is only intended for simple use-cases. See
190 | the :doc:`usage` and :doc:`api` documentation for more information.
191 |
192 | All of the worker related things above can also be done using rq instead of
193 | Celery.
194 |
195 | Contents
196 | ========
197 |
198 | .. toctree::
199 | :maxdepth: 2
200 | :glob:
201 |
202 | installation
203 | usage
204 | api
205 | settings
206 | advanced
207 | contributing
208 |
209 |
210 |
211 | Indices and tables
212 | ==================
213 |
214 | * :ref:`genindex`
215 | * :ref:`modindex`
216 | * :ref:`search`
217 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
#
# django-cacheback documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 30 21:40:46 2012.
#
# This file is execfile()d with the current directory set to its containing
# dir. All configuration values have a default; only values that differ from
# the defaults are set here.

import os
import sys

# Make the package root importable so autodoc can find cacheback.
code_dir = os.path.realpath(
    os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(code_dir)

from django.conf import settings

# autodoc imports cacheback modules outside a Django project, so Django
# needs a minimal configuration before those imports happen.
if not settings.configured:
    settings.configure(
        DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3'}},
        RQ_QUEUES={},
    )

# -- General configuration -----------------------------------------------------

# Sphinx extension modules, builtin or custom.
extensions = ['sphinx.ext.autodoc']

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'django-cacheback'
copyright = u'2012, David Winterbottom'

# Version info for the project being documented; both the short X.Y version
# and the full release string track the package version.
# NOTE: this import must stay below the sys.path manipulation above.
import cacheback
version = cacheback.__version__
release = cacheback.__version__

# Patterns, relative to the source directory, to ignore when looking for
# source files.
exclude_patterns = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.
html_theme = 'default'

# Output file base name for HTML help builder.
htmlhelp_basename = 'django-cacheback'

# -- Options for LaTeX output --------------------------------------------------

# Paper size, point size and preamble all use the Sphinx defaults.
latex_elements = {}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'django-cacheback', u'django-cacheback Documentation',
     u'David Winterbottom', 'manual'),
]

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'django-cacheback', u'django-cacheback Documentation',
     [u'David Winterbottom'], 1)
]

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category).
texinfo_documents = [
    ('index', 'django-cacheback', u'django-cacheback Documentation',
     u'David Winterbottom', 'django-cacheback', 'One line description of project.',
     'Miscellaneous'),
]
--------------------------------------------------------------------------------
/tests/test_base_job.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 | import pytest
4 | from django.core.cache import cache, caches
5 | from django.core.cache.backends.base import BaseCache
6 | from django.utils import timezone
7 | from freezegun import freeze_time
8 |
9 | from cacheback.base import Job
10 | from tests.dummyapp.models import DummyModel
11 |
12 |
class DummyJob(Job):
    """Minimal synchronous job used as the base fixture for these tests."""

    def fetch(self, param):
        # The timestamp lets tests detect whether a later call re-ran
        # fetch or served the previously cached tuple.
        return ('JOB-EXECUTED:{0}'.format(param), timezone.now())
16 |
17 |
class CacheAliasDummyJob(DummyJob):
    # Store results in the 'secondary' cache instead of the default one.
    cache_alias = 'secondary'
20 |
21 |
class EmptyDummyJob(DummyJob):
    # On a cache miss, return None and refresh asynchronously instead of
    # fetching inline.
    fetch_on_miss = False
24 |
25 |
class StaleDummyJob(DummyJob):
    # Staleness threshold in seconds controlling when a stale item is
    # refetched synchronously rather than asynchronously (see Job).
    fetch_on_stale_threshold = 900
28 |
29 |
class FailJob(Job):
    """Job whose fetch always raises, for exercising error handling."""

    def fetch(self):
        raise Exception('JOB-FAILED')
33 |
34 |
class CustomPayloadLabelJob(Job):
    # Job.set() should accept the payload under this keyword instead of
    # the default 'data'.
    set_data_kwarg = 'my_cache_data'
37 |
38 |
39 | @pytest.mark.usefixtures('cleared_cache', scope='function')
40 | class TestJob:
    def test_init(self):
        # A fresh job binds a real Django cache backend and carries no
        # extra task options by default.
        job = DummyJob()
        assert isinstance(job.cache, BaseCache)
        assert job.task_options == {}
45 |
    def test_get_miss_sync(self):
        # Default behaviour: a cache miss fetches synchronously.
        assert DummyJob().get('foo')[0] == 'JOB-EXECUTED:foo'
48 |
    def test_get_uses_cache_alias(self):
        # Results must land in the configured alias cache, not the
        # default cache.
        job = CacheAliasDummyJob()
        assert job.get('foo')[0] == 'JOB-EXECUTED:foo'
        assert job.key('foo') not in cache
        assert job.key('foo') in caches[CacheAliasDummyJob.cache_alias]
54 |
    @pytest.mark.redis_required
    def test_get_miss_empty_async(self, rq_burst):
        # With fetch_on_miss=False a miss returns None; the rq worker
        # populates the cache in the background.
        job = EmptyDummyJob()
        assert job.get('foo') is None
        rq_burst()
        assert job.get('foo')[0] == 'JOB-EXECUTED:foo'
61 |
    def test_get_stale_sync(self):
        # 16 minutes later the item is presumably past the staleness
        # threshold, so the second get() refetches synchronously and
        # returns a fresher timestamp.
        job = StaleDummyJob()
        with freeze_time('2016-03-20 14:00'):
            first_result = job.get('foo')

        with freeze_time('2016-03-20 14:16'):
            second_result = job.get('foo')

        assert first_result[1] < second_result[1]
71 |
    @pytest.mark.redis_required
    def test_get_stale_async(self, rq_burst):
        job = StaleDummyJob()
        with freeze_time('2016-03-20 14:00'):
            first_result = job.get('foo')

        # 14 minutes in: stale, but presumably still under the sync-fetch
        # threshold, so the stale value is served unchanged while a
        # background refresh is queued.
        with freeze_time('2016-03-20 14:14'):
            second_result = job.get('foo')

        assert first_result[1] == second_result[1]

        # Run the queued rq job to actually refresh the cache.
        with freeze_time('2016-03-20 14:16'):
            rq_burst()

        with freeze_time('2016-03-20 14:17'):
            third_result = job.get('foo')

        assert second_result[1] < third_result[1]
90 |
91 | @pytest.mark.redis_required
92 | def test_get_hit(self, rq_worker):
93 | job = StaleDummyJob()
94 | with freeze_time('2016-03-20 14:00'):
95 | first_result = job.get('foo')
96 |
97 | with freeze_time('2016-03-20 14:05'):
98 | second_result = job.get('foo')
99 |
100 | assert first_result[1] == second_result[1]
101 |
102 | # Check if a task was inserted.
103 | assert len(rq_worker.queues[0].jobs) == 0
104 |
105 | @pytest.mark.redis_required
106 | def test_invalidate_miss(self, rq_worker):
107 | DummyJob().invalidate('foo')
108 | # There was no cached item, nothing todo.
109 | assert len(rq_worker.queues[0].jobs) == 0
110 |
111 | @pytest.mark.redis_required
112 | def test_invalidate_hit(self, rq_worker):
113 | job = DummyJob()
114 | job.refresh('foo')
115 | job.invalidate('foo')
116 | assert len(rq_worker.queues[0].jobs) == 1
117 |
118 | def test_delete_miss(self):
119 | job = DummyJob()
120 | job.delete('foo')
121 | assert job.key('foo') not in job.cache
122 |
123 | def test_delete_hit(self):
124 | job = DummyJob()
125 | job.refresh('foo')
126 | assert job.key('foo') in job.cache
127 | job.delete('foo')
128 | assert job.key('foo') not in job.cache
129 |
130 | def test_store(self):
131 | job = DummyJob()
132 | job.store(job.key('foo'), job.expiry(), True)
133 | assert job.key('foo') in job.cache
134 |
135 | def test_store_verify_fail(self, settings):
136 | settings.CACHEBACK_CACHE_ALIAS = 'dummy'
137 | settings.CACHEBACK_VERIFY_CACHE_WRITE = True
138 |
139 | job = DummyJob()
140 |
141 | with pytest.raises(RuntimeError) as exc:
142 | job.store(job.key('foo'), job.expiry(), True)
143 |
144 | assert 'Unable to save' in str(exc.value)
145 |
146 | def test_store_no_verify_fail(self, settings):
147 | settings.CACHEBACK_CACHE_ALIAS = 'dummy'
148 | settings.CACHEBACK_VERIFY_CACHE_WRITE = False
149 |
150 | job = DummyJob()
151 | job.store(job.key('foo'), job.expiry(), True)
152 | assert job.key('foo') not in job.cache
153 |
154 | def test_refresh(self):
155 | job = DummyJob()
156 | result = job.refresh('foo')
157 | assert result[0] == 'JOB-EXECUTED:foo'
158 | assert job.key('foo') in job.cache
159 |
160 | @pytest.mark.redis_required
161 | def test_async_refresh(self, rq_worker):
162 | job = DummyJob()
163 | job.async_refresh('foo')
164 | assert job.key('foo') not in job.cache
165 | assert len(rq_worker.queues[0].jobs) == 1
166 |
167 | @mock.patch('cacheback.base.enqueue_task')
168 | def test_async_refresh_task_fail(self, enqueue_mock):
169 | enqueue_mock.side_effect = Exception
170 | job = DummyJob()
171 | job.async_refresh('foo')
172 | assert job.get('foo')[0] == 'JOB-EXECUTED:foo'
173 |
174 | @mock.patch('cacheback.base.enqueue_task')
175 | @mock.patch('cacheback.base.Job.refresh')
176 | def test_async_refresh_task_fail_sync_fail(self, refresh_mock, enqueue_mock):
177 | refresh_mock.side_effect = Exception
178 | enqueue_mock.side_effect = Exception
179 | job = DummyJob()
180 | job.async_refresh('foo')
181 | assert job.key('foo') not in job.cache
182 |
183 | def test_expiry(self):
184 | with freeze_time('2016-03-20 14:05'):
185 | job = DummyJob()
186 | assert job.expiry() == 1458483300
187 |
188 | def test_timeout(self):
189 | with freeze_time('2016-03-20 14:05'):
190 | job = DummyJob()
191 | assert job.timeout() == 1458482760
192 |
193 | def test_should_stale_item_be_fetched_synchronously_no_threshold(self):
194 | assert DummyJob().should_stale_item_be_fetched_synchronously(0) is False
195 |
196 | def test_should_stale_item_be_fetched_synchronously_reached(self):
197 | assert StaleDummyJob().should_stale_item_be_fetched_synchronously(301) is True
198 |
199 | def test_should_stale_item_be_fetched_synchronously_not_reached(self):
200 | assert StaleDummyJob().should_stale_item_be_fetched_synchronously(300) is False
201 |
202 | def test_key_no_args_no_kwargs(self):
203 | assert DummyJob().key() == 'tests.test_base_job.DummyJob'
204 |
205 | def test_key_args_no_kwargs(self):
206 | assert DummyJob().key(1, 2, 3) == (
207 | 'tests.test_base_job.DummyJob:7b6e2994f12a7e000c01190edec1921e'
208 | )
209 |
210 | def test_key_no_args_kwargs(self):
211 | assert DummyJob().key(foo='bar') == (
212 | 'tests.test_base_job.DummyJob:d41d8cd98f00b204e9800998ecf8427e:'
213 | 'acbd18db4cc2f85cedef654fccc4a4d8:37b51d194a7513e45b56f6524f2d51f2'
214 | )
215 |
216 | def test_key_args_kwargs(self):
217 | assert DummyJob().key(1, 2, foo='bar', bar='baz') == (
218 | 'tests.test_base_job.DummyJob:def474a313bffa002eae8941b2e12620:'
219 | '8856328b99ee7881e9bf7205296e056d:c9ebc77141c29f6d619cf8498631343d'
220 | )
221 |
222 | def test_raw_get(self):
223 | job = DummyJob()
224 | with freeze_time('2016-03-20 14:05'):
225 |
226 | job.set('foo', 'MANUALLY_SET')
227 |
228 | expiry, value = job.raw_get('foo')
229 |
230 | assert expiry == float(1458483300)
231 | assert value == 'MANUALLY_SET'
232 |
233 | def test_raw_get_empty(self):
234 | job = DummyJob()
235 |
236 | assert job.raw_get() is None
237 |
238 | def test_set(self):
239 | job = DummyJob()
240 | job.set('foo', 'MANUALLY_SET')
241 |
242 | assert job.get('foo') == 'MANUALLY_SET'
243 |
244 | def test_set_preset_cache(self):
245 | job = DummyJob()
246 | assert job.get('foo')[0] == 'JOB-EXECUTED:foo'
247 |
248 | # It is cached
249 | assert job.key('foo') in job.cache
250 |
251 | job.set('foo', 'MANUALLY_SET')
252 |
253 | assert job.get('foo') == 'MANUALLY_SET'
254 |
255 | def test_set_default_kw_arg(self):
256 |
257 | job = DummyJob()
258 | job.set('foo', data='MANUALLY_SET_WITH_KW_ARG')
259 |
260 | assert job.get('foo') == 'MANUALLY_SET_WITH_KW_ARG'
261 |
262 | def test_set_default_custom_kw_arg(self):
263 |
264 | job = CustomPayloadLabelJob()
265 | job.set('foo', my_cache_data='MANUALLY_SET_WITH_CUSTOM_KW_ARG')
266 |
267 | assert job.get('foo') == 'MANUALLY_SET_WITH_CUSTOM_KW_ARG'
268 |
269 | @pytest.mark.django_db
270 | def test_key_django_model(self):
271 | alan = DummyModel.objects.create(name="Alan")
272 | john = DummyModel.objects.create(name="John")
273 | assert (
274 | DummyJob().key(alan)
275 | == 'tests.test_base_job.DummyJob:9df82067f944cc95795bc89ec0aa65df'
276 | )
277 | assert DummyJob().key(alan) != DummyJob().key(john)
278 |
279 | @mock.patch('cacheback.base.logger')
280 | def test_job_refresh_unkown_jobclass(self, logger_mock):
281 | Job.perform_async_refresh('foomodule.BarJob', (), {}, (), {})
282 | assert 'Unable to construct %s with' in (logger_mock.error.call_args[0][0])
283 | assert logger_mock.error.call_args[0][1] == 'foomodule.BarJob'
284 |
285 | @mock.patch('cacheback.base.logger')
286 | def test_job_refresh_perform_error(self, logger_mock):
287 | Job.perform_async_refresh('tests.test_base_job.FailJob', (), {}, (), {})
288 | assert 'Error running job' in (logger_mock.exception.call_args[0][0])
289 | assert isinstance(logger_mock.exception.call_args[0][1], Exception)
290 |
291 | def test_job_refresh(self):
292 | Job.perform_async_refresh('tests.test_base_job.EmptyDummyJob', (), {}, ('foo',), {})
293 | assert EmptyDummyJob().get('foo') is not None
294 |
--------------------------------------------------------------------------------
/cacheback/base.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import hashlib
3 | import logging
4 | import time
5 |
6 | from django.conf import settings
7 | from django.core.cache import DEFAULT_CACHE_ALIAS, caches
8 | from django.db.models import Model as DjangoModel
9 | from django.utils.itercompat import is_iterable
10 |
11 | from .utils import enqueue_task, get_job_class
12 |
13 |
# Package-wide logger; tests patch 'cacheback.base.logger' directly.
logger = logging.getLogger('cacheback')

# 30 days in seconds — used as the default cache TTL (Job.cache_ttl). Named
# after memcached's documented upper bound for relative expiration times.
MEMCACHE_MAX_EXPIRATION = 2592000


# Container for call args (which makes things simpler to pass around)
Call = collections.namedtuple("Call", ['args', 'kwargs'])
21 |
22 |
def to_bytestring(value):
    """
    Encode an object as a UTF8 bytestring. This function could be passed a
    bytestring, unicode string or object so must distinguish between them.

    :param value: object we want to transform into a bytestring
    :returns: a bytestring
    """
    if isinstance(value, DjangoModel):
        # Models are keyed by class plus their hash so equal instances match.
        return ('%s:%s' % (value.__class__, hash(value))).encode('utf-8')
    elif isinstance(value, str):
        return value.encode('utf8')
    elif isinstance(value, bytes):
        return value
    # Anything else: fall back to its string representation.
    return str(value).encode('utf8')
38 |
39 |
class Job(object):
    """
    A cached read job.

    This is the core class for the package which is intended to be subclassed
    to allow the caching behaviour to be customised.
    """

    # All items are stored in memcache as a tuple (expiry, data). We don't use
    # the TTL functionality within memcache but implement our own. If the
    # expiry value is None, this indicates that there is already a job created
    # for refreshing this item.

    #: Default cache lifetime is 10 minutes. After this time, the result will
    #: be considered stale and requests will trigger a job to refresh it.
    lifetime = 600

    #: Timeout period during which no new tasks will be created for a
    #: single cache item. This time should cover the normal time required to
    #: refresh the cache.
    refresh_timeout = 60

    #: Specifies which cache to use from your `CACHES` setting. It defaults to
    #: `default`.
    cache_alias = None

    #: Time to store items in the cache. After this time, we will get a cache
    #: miss which can lead to synchronous refreshes if you have
    #: fetch_on_miss=True.
    cache_ttl = MEMCACHE_MAX_EXPIRATION

    #: Whether to perform a synchronous refresh when a result is missing from
    #: the cache. Default behaviour is to do a synchronous fetch when the cache is empty.
    #: Stale results are generally ok, but empty results are not.
    fetch_on_miss = True

    #: Whether to perform a synchronous refresh when a result is in the cache
    #: but stale. Default behaviour is never to do a synchronous fetch but
    #: there will be times when an item is _too_ stale to be returned.
    fetch_on_stale_threshold = None

    #: parameter name to pass in the data which is to be cached in the set method. Data can
    #: also be passed as last positional argument in set method, but using a kw arg may be
    #: clearer or even necessary. Defaults to 'data'
    set_data_kwarg = 'data'

    #: Overrides options for `refresh_cache.apply_async` (e.g. `queue`).
    task_options = None

    #: Cache statuses
    MISS, HIT, STALE = range(3)

    @property
    def class_path(self):
        """Dotted import path of this class, used to reconstruct it in workers."""
        return '%s.%s' % (self.__module__, self.__class__.__name__)

    def __init__(self):
        # Resolve the cache alias lazily so a settings override (e.g. in
        # tests) takes effect per instance.
        self.cache_alias = self.cache_alias or getattr(
            settings, 'CACHEBACK_CACHE_ALIAS', DEFAULT_CACHE_ALIAS
        )
        self.task_options = self.task_options or {}

    @property
    def cache(self):
        """The Django cache backend this job reads from and writes to."""
        return caches[self.cache_alias]

    def get_init_args(self):
        """
        Return the args that need to be passed to __init__ when
        reconstructing this class.
        """
        return ()

    def get_init_kwargs(self):
        """
        Return the kwargs that need to be passed to __init__ when
        reconstructing this class.
        """
        return {}

    # --------
    # MAIN API
    # --------

    def get(self, *raw_args, **raw_kwargs):
        """
        Return the data for this function (using the cache if possible).

        This method is not intended to be overidden
        """
        # We pass args and kwargs through a filter to allow them to be
        # converted into values that can be pickled.
        args = self.prepare_args(*raw_args)
        kwargs = self.prepare_kwargs(**raw_kwargs)

        # Build the cache key and attempt to fetch the cached item
        key = self.key(*args, **kwargs)
        item = self.cache.get(key)

        call = Call(args=raw_args, kwargs=raw_kwargs)

        if item is None:
            # Cache MISS - we can either:
            # a) fetch the data immediately, blocking execution until
            #    the fetch has finished, or
            # b) trigger an async refresh and return an empty result
            if self.should_missing_item_be_fetched_synchronously(*args, **kwargs):
                logger.debug(
                    ("Job %s with key '%s' - cache MISS - running " "synchronous refresh"),
                    self.class_path,
                    key,
                )
                result = self.refresh(*args, **kwargs)
                return self.process_result(
                    result, call=call, cache_status=self.MISS, sync_fetch=True
                )

            else:
                logger.debug(
                    (
                        "Job %s with key '%s' - cache MISS - triggering "
                        "async refresh and returning empty result"
                    ),
                    self.class_path,
                    key,
                )
                # To avoid cache hammering (ie lots of identical tasks
                # to refresh the same cache item), we reset the cache with an
                # empty result which will be returned until the cache is
                # refreshed.
                result = self.empty()
                self.store(key, self.timeout(*args, **kwargs), result)
                self.async_refresh(*args, **kwargs)
                return self.process_result(
                    result, call=call, cache_status=self.MISS, sync_fetch=False
                )

        expiry, data = item
        delta = time.time() - expiry
        if delta > 0:
            # Cache HIT but STALE expiry - we can either:
            # a) fetch the data immediately, blocking execution until
            #    the fetch has finished, or
            # b) trigger a refresh but allow the stale result to be
            #    returned this time. This is normally acceptable.
            if self.should_stale_item_be_fetched_synchronously(delta, *args, **kwargs):
                logger.debug(
                    ("Job %s with key '%s' - STALE cache hit - running " "synchronous refresh"),
                    self.class_path,
                    key,
                )
                result = self.refresh(*args, **kwargs)
                return self.process_result(
                    result, call=call, cache_status=self.STALE, sync_fetch=True
                )

            else:
                logger.debug(
                    (
                        "Job %s with key '%s' - STALE cache hit - triggering "
                        "async refresh and returning stale result"
                    ),
                    self.class_path,
                    key,
                )
                # We replace the item in the cache with a 'timeout' expiry - this
                # prevents cache hammering but guards against a 'limbo' situation
                # where the refresh task fails for some reason.
                timeout = self.timeout(*args, **kwargs)
                self.store(key, timeout, data)
                self.async_refresh(*args, **kwargs)
                return self.process_result(
                    data, call=call, cache_status=self.STALE, sync_fetch=False
                )
        else:
            logger.debug("Job %s with key '%s' - cache HIT", self.class_path, key)
            return self.process_result(data, call=call, cache_status=self.HIT)

    def invalidate(self, *raw_args, **raw_kwargs):
        """
        Mark a cached item invalid and trigger an asynchronous
        job to refresh the cache
        """
        args = self.prepare_args(*raw_args)
        kwargs = self.prepare_kwargs(**raw_kwargs)
        key = self.key(*args, **kwargs)
        item = self.cache.get(key)
        if item is not None:
            # Keep serving the existing data until the refresh task lands,
            # but shorten its expiry to the refresh timeout.
            __, data = item
            self.store(key, self.timeout(*args, **kwargs), data)
            self.async_refresh(*args, **kwargs)

    def delete(self, *raw_args, **raw_kwargs):
        """
        Remove an item from the cache
        """
        args = self.prepare_args(*raw_args)
        kwargs = self.prepare_kwargs(**raw_kwargs)
        key = self.key(*args, **kwargs)
        self.cache.delete(key)

    def raw_get(self, *raw_args, **raw_kwargs):
        """
        Retrieve the item (tuple of expiry and value) that is actually in the
        cache, without causing a refresh. Returns None on a cache miss.
        """

        args = self.prepare_args(*raw_args)
        kwargs = self.prepare_kwargs(**raw_kwargs)

        key = self.key(*args, **kwargs)

        return self.cache.get(key)

    def set(self, *raw_args, **raw_kwargs):
        """
        Manually set the cache value with its appropriate expiry.

        The payload is taken from the kwarg named by ``set_data_kwarg``
        (default 'data') if present, otherwise from the last positional arg.
        """
        if self.set_data_kwarg in raw_kwargs:
            data = raw_kwargs.pop(self.set_data_kwarg)
        else:
            raw_args = list(raw_args)
            data = raw_args.pop()

        args = self.prepare_args(*raw_args)
        kwargs = self.prepare_kwargs(**raw_kwargs)

        key = self.key(*args, **kwargs)

        expiry = self.expiry(*args, **kwargs)

        logger.debug(
            "Setting %s cache with key '%s', args '%r', kwargs '%r', expiry '%r'",
            self.class_path,
            key,
            args,
            kwargs,
            expiry,
        )

        self.store(key, expiry, data)

    # --------------
    # HELPER METHODS
    # --------------

    def prepare_args(self, *args):
        """Hook to convert raw args into picklable/hashable values."""
        return args

    def prepare_kwargs(self, **kwargs):
        """Hook to convert raw kwargs into picklable/hashable values."""
        return kwargs

    def store(self, key, expiry, data):
        """
        Add a result to the cache

        :key: Cache key to use
        :expiry: The expiry timestamp after which the result is stale
        :data: The data to cache
        """
        self.cache.set(key, (expiry, data), self.cache_ttl)

        if getattr(settings, 'CACHEBACK_VERIFY_CACHE_WRITE', True):
            # We verify that the item was cached correctly. This is to avoid a
            # Memcache problem where some values aren't cached correctly
            # without warning.
            __, cached_data = self.cache.get(key, (None, None))
            if data is not None and cached_data is None:
                raise RuntimeError("Unable to save data of type %s to cache" % (type(data)))

    def refresh(self, *args, **kwargs):
        """
        Fetch the result SYNCHRONOUSLY and populate the cache
        """
        result = self.fetch(*args, **kwargs)
        self.store(self.key(*args, **kwargs), self.expiry(*args, **kwargs), result)
        return result

    def async_refresh(self, *args, **kwargs):
        """
        Trigger an asynchronous job to refresh the cache
        """
        # We trigger the task with the class path to import as well as the
        # (a) args and kwargs for instantiating the class
        # (b) args and kwargs for calling the 'refresh' method

        try:
            enqueue_task(
                dict(
                    klass_str=self.class_path,
                    obj_args=self.get_init_args(),
                    obj_kwargs=self.get_init_kwargs(),
                    call_args=args,
                    call_kwargs=kwargs,
                ),
                task_options=self.task_options,
            )
        except Exception:
            # Handle exceptions from talking to RabbitMQ - eg connection
            # refused. When this happens, we try to run the task
            # synchronously.
            logger.error(
                "Unable to trigger task asynchronously - failing "
                "over to synchronous refresh",
                exc_info=True,
            )
            try:
                return self.refresh(*args, **kwargs)
            except Exception as e:
                # Something went wrong while running the task
                logger.error("Unable to refresh data synchronously: %s", e, exc_info=True)
            else:
                logger.debug("Failover synchronous refresh completed successfully")

    # Override these methods

    def empty(self):
        """
        Return the appropriate value for a cache MISS (and when we defer the
        repopulation of the cache)
        """
        return None

    def expiry(self, *args, **kwargs):
        """
        Return the expiry timestamp for this item.
        """
        return time.time() + self.lifetime

    def timeout(self, *args, **kwargs):
        """
        Return the refresh timeout for this item
        """
        return time.time() + self.refresh_timeout

    def should_missing_item_be_fetched_synchronously(self, *args, **kwargs):
        """
        Return whether to refresh an item synchronously when it is missing from
        the cache
        """
        return self.fetch_on_miss

    def should_stale_item_be_fetched_synchronously(self, delta, *args, **kwargs):
        """
        Return whether to refresh an item synchronously when it is found in the
        cache but stale
        """
        if self.fetch_on_stale_threshold is None:
            return False
        # delta is measured from expiry, which already includes lifetime, so
        # compare against the threshold minus the lifetime.
        return delta > (self.fetch_on_stale_threshold - self.lifetime)

    def key(self, *args, **kwargs):
        """
        Return the cache key to use.

        If you're passing anything but primitive types to the ``get`` method,
        it's likely that you'll need to override this method.
        """
        if not args and not kwargs:
            return self.class_path
        try:
            if args and not kwargs:
                return "%s:%s" % (self.class_path, self.hash(args))
            # The line might break if your passed values are un-hashable. If
            # it does, you need to override this method and implement your own
            # key algorithm.
            return "%s:%s:%s:%s" % (
                self.class_path,
                self.hash(args),
                self.hash(sorted(kwargs)),
                self.hash([kwargs[k] for k in sorted(kwargs)]),
            )
        except TypeError:
            # Fixed: the adjacent literals previously concatenated without
            # spaces ("...unhashableargs...", "...ownkey...").
            raise RuntimeError(
                "Unable to generate cache key due to unhashable "
                "args or kwargs - you need to implement your own "
                "key generation method to avoid this problem"
            )

    def hash(self, value):
        """
        Generate a hash of the given iterable.

        This is for use in a cache key.
        """
        if is_iterable(value):
            value = tuple(to_bytestring(v) for v in value)
        return hashlib.md5(b':'.join(value)).hexdigest()

    def fetch(self, *args, **kwargs):
        """
        Return the data for this job - this is where the expensive work should
        be done.
        """
        raise NotImplementedError()

    def process_result(self, result, call, cache_status, sync_fetch=None):
        """
        Transform the fetched data right before returning from .get(...)

        :param result: The result to be returned
        :param call: A named tuple with properties 'args' and 'kwargs' that
                     holds the call args and kwargs
        :param cache_status: A status integer, accessible as class constants
                             self.MISS, self.HIT, self.STALE
        :param sync_fetch: A boolean indicating whether a synchronous fetch was
                           performed. A value of None indicates that no fetch
                           was required (ie the result was a cache hit).
        """
        return result

    # --------------------
    # ASYNC HELPER METHODS
    # --------------------

    @classmethod
    def perform_async_refresh(cls, klass_str, obj_args, obj_kwargs, call_args, call_kwargs):
        """
        Re-populate cache using the given job class.

        The job class is instantiated with the passed constructor args and the
        refresh method is called with the passed call args. That is::

            data = klass(*obj_args, **obj_kwargs).refresh(
                *call_args, **call_kwargs)

        :klass_str: String repr of class (eg 'apps.twitter.jobs.FetchTweetsJob')
        :obj_args: Constructor args
        :obj_kwargs: Constructor kwargs
        :call_args: Refresh args
        :call_kwargs: Refresh kwargs
        """
        klass = get_job_class(klass_str)
        if klass is None:
            logger.error(
                "Unable to construct %s with args %r and kwargs %r",
                klass_str,
                obj_args,
                obj_kwargs,
            )
            return

        logger.info(
            "Using %s with constructor args %r and kwargs %r", klass_str, obj_args, obj_kwargs
        )
        logger.info("Calling refresh with args %r and kwargs %r", call_args, call_kwargs)
        start = time.time()
        try:
            klass(*obj_args, **obj_kwargs).refresh(*call_args, **call_kwargs)
        except Exception as e:
            logger.exception("Error running job: '%s'", e)
        else:
            duration = time.time() - start
            logger.info("Refreshed cache in %.6f seconds", duration)
494 |
--------------------------------------------------------------------------------
/sandbox/fixture.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "pk": 1,
4 | "model": "dummyapp.dummymodel",
5 | "fields": {
6 | "date_created": "2012-08-07T08:23:24.718Z",
7 | "name": "Item_0"
8 | }
9 | },
10 | {
11 | "pk": 2,
12 | "model": "dummyapp.dummymodel",
13 | "fields": {
14 | "date_created": "2012-08-07T08:23:24.728Z",
15 | "name": "Item_1"
16 | }
17 | },
18 | {
19 | "pk": 3,
20 | "model": "dummyapp.dummymodel",
21 | "fields": {
22 | "date_created": "2012-08-07T08:23:24.731Z",
23 | "name": "Item_2"
24 | }
25 | },
26 | {
27 | "pk": 4,
28 | "model": "dummyapp.dummymodel",
29 | "fields": {
30 | "date_created": "2012-08-07T08:23:24.735Z",
31 | "name": "Item_3"
32 | }
33 | },
34 | {
35 | "pk": 5,
36 | "model": "dummyapp.dummymodel",
37 | "fields": {
38 | "date_created": "2012-08-07T08:23:24.738Z",
39 | "name": "Item_4"
40 | }
41 | },
42 | {
43 | "pk": 6,
44 | "model": "dummyapp.dummymodel",
45 | "fields": {
46 | "date_created": "2012-08-07T08:23:24.741Z",
47 | "name": "Item_5"
48 | }
49 | },
50 | {
51 | "pk": 7,
52 | "model": "dummyapp.dummymodel",
53 | "fields": {
54 | "date_created": "2012-08-07T08:23:24.743Z",
55 | "name": "Item_6"
56 | }
57 | },
58 | {
59 | "pk": 8,
60 | "model": "dummyapp.dummymodel",
61 | "fields": {
62 | "date_created": "2012-08-07T08:23:24.746Z",
63 | "name": "Item_7"
64 | }
65 | },
66 | {
67 | "pk": 9,
68 | "model": "dummyapp.dummymodel",
69 | "fields": {
70 | "date_created": "2012-08-07T08:23:24.749Z",
71 | "name": "Item_8"
72 | }
73 | },
74 | {
75 | "pk": 10,
76 | "model": "dummyapp.dummymodel",
77 | "fields": {
78 | "date_created": "2012-08-07T08:23:24.752Z",
79 | "name": "Item_9"
80 | }
81 | },
82 | {
83 | "pk": 11,
84 | "model": "dummyapp.dummymodel",
85 | "fields": {
86 | "date_created": "2012-08-07T08:23:24.754Z",
87 | "name": "Item_10"
88 | }
89 | },
90 | {
91 | "pk": 12,
92 | "model": "dummyapp.dummymodel",
93 | "fields": {
94 | "date_created": "2012-08-07T08:23:24.756Z",
95 | "name": "Item_11"
96 | }
97 | },
98 | {
99 | "pk": 13,
100 | "model": "dummyapp.dummymodel",
101 | "fields": {
102 | "date_created": "2012-08-07T08:23:24.760Z",
103 | "name": "Item_12"
104 | }
105 | },
106 | {
107 | "pk": 14,
108 | "model": "dummyapp.dummymodel",
109 | "fields": {
110 | "date_created": "2012-08-07T08:23:24.763Z",
111 | "name": "Item_13"
112 | }
113 | },
114 | {
115 | "pk": 15,
116 | "model": "dummyapp.dummymodel",
117 | "fields": {
118 | "date_created": "2012-08-07T08:23:24.765Z",
119 | "name": "Item_14"
120 | }
121 | },
122 | {
123 | "pk": 16,
124 | "model": "dummyapp.dummymodel",
125 | "fields": {
126 | "date_created": "2012-08-07T08:23:24.768Z",
127 | "name": "Item_15"
128 | }
129 | },
130 | {
131 | "pk": 17,
132 | "model": "dummyapp.dummymodel",
133 | "fields": {
134 | "date_created": "2012-08-07T08:23:24.771Z",
135 | "name": "Item_16"
136 | }
137 | },
138 | {
139 | "pk": 18,
140 | "model": "dummyapp.dummymodel",
141 | "fields": {
142 | "date_created": "2012-08-07T08:23:24.773Z",
143 | "name": "Item_17"
144 | }
145 | },
146 | {
147 | "pk": 19,
148 | "model": "dummyapp.dummymodel",
149 | "fields": {
150 | "date_created": "2012-08-07T08:23:24.775Z",
151 | "name": "Item_18"
152 | }
153 | },
154 | {
155 | "pk": 20,
156 | "model": "dummyapp.dummymodel",
157 | "fields": {
158 | "date_created": "2012-08-07T08:23:24.778Z",
159 | "name": "Item_19"
160 | }
161 | },
162 | {
163 | "pk": 21,
164 | "model": "dummyapp.dummymodel",
165 | "fields": {
166 | "date_created": "2012-08-07T08:23:24.780Z",
167 | "name": "Item_20"
168 | }
169 | },
170 | {
171 | "pk": 22,
172 | "model": "dummyapp.dummymodel",
173 | "fields": {
174 | "date_created": "2012-08-07T08:23:24.782Z",
175 | "name": "Item_21"
176 | }
177 | },
178 | {
179 | "pk": 23,
180 | "model": "dummyapp.dummymodel",
181 | "fields": {
182 | "date_created": "2012-08-07T08:23:24.784Z",
183 | "name": "Item_22"
184 | }
185 | },
186 | {
187 | "pk": 24,
188 | "model": "dummyapp.dummymodel",
189 | "fields": {
190 | "date_created": "2012-08-07T08:23:24.788Z",
191 | "name": "Item_23"
192 | }
193 | },
194 | {
195 | "pk": 25,
196 | "model": "dummyapp.dummymodel",
197 | "fields": {
198 | "date_created": "2012-08-07T08:23:24.791Z",
199 | "name": "Item_24"
200 | }
201 | },
202 | {
203 | "pk": 26,
204 | "model": "dummyapp.dummymodel",
205 | "fields": {
206 | "date_created": "2012-08-07T08:23:24.793Z",
207 | "name": "Item_25"
208 | }
209 | },
210 | {
211 | "pk": 27,
212 | "model": "dummyapp.dummymodel",
213 | "fields": {
214 | "date_created": "2012-08-07T08:23:24.796Z",
215 | "name": "Item_26"
216 | }
217 | },
218 | {
219 | "pk": 28,
220 | "model": "dummyapp.dummymodel",
221 | "fields": {
222 | "date_created": "2012-08-07T08:23:24.803Z",
223 | "name": "Item_27"
224 | }
225 | },
226 | {
227 | "pk": 29,
228 | "model": "dummyapp.dummymodel",
229 | "fields": {
230 | "date_created": "2012-08-07T08:23:24.805Z",
231 | "name": "Item_28"
232 | }
233 | },
234 | {
235 | "pk": 30,
236 | "model": "dummyapp.dummymodel",
237 | "fields": {
238 | "date_created": "2012-08-07T08:23:24.808Z",
239 | "name": "Item_29"
240 | }
241 | },
242 | {
243 | "pk": 31,
244 | "model": "dummyapp.dummymodel",
245 | "fields": {
246 | "date_created": "2012-08-07T08:23:24.811Z",
247 | "name": "Item_30"
248 | }
249 | },
250 | {
251 | "pk": 32,
252 | "model": "dummyapp.dummymodel",
253 | "fields": {
254 | "date_created": "2012-08-07T08:23:24.813Z",
255 | "name": "Item_31"
256 | }
257 | },
258 | {
259 | "pk": 33,
260 | "model": "dummyapp.dummymodel",
261 | "fields": {
262 | "date_created": "2012-08-07T08:23:24.816Z",
263 | "name": "Item_32"
264 | }
265 | },
266 | {
267 | "pk": 34,
268 | "model": "dummyapp.dummymodel",
269 | "fields": {
270 | "date_created": "2012-08-07T08:23:24.818Z",
271 | "name": "Item_33"
272 | }
273 | },
274 | {
275 | "pk": 35,
276 | "model": "dummyapp.dummymodel",
277 | "fields": {
278 | "date_created": "2012-08-07T08:23:24.820Z",
279 | "name": "Item_34"
280 | }
281 | },
282 | {
283 | "pk": 36,
284 | "model": "dummyapp.dummymodel",
285 | "fields": {
286 | "date_created": "2012-08-07T08:23:24.823Z",
287 | "name": "Item_35"
288 | }
289 | },
290 | {
291 | "pk": 37,
292 | "model": "dummyapp.dummymodel",
293 | "fields": {
294 | "date_created": "2012-08-07T08:23:24.826Z",
295 | "name": "Item_36"
296 | }
297 | },
298 | {
299 | "pk": 38,
300 | "model": "dummyapp.dummymodel",
301 | "fields": {
302 | "date_created": "2012-08-07T08:23:24.829Z",
303 | "name": "Item_37"
304 | }
305 | },
306 | {
307 | "pk": 39,
308 | "model": "dummyapp.dummymodel",
309 | "fields": {
310 | "date_created": "2012-08-07T08:23:24.831Z",
311 | "name": "Item_38"
312 | }
313 | },
314 | {
315 | "pk": 40,
316 | "model": "dummyapp.dummymodel",
317 | "fields": {
318 | "date_created": "2012-08-07T08:23:24.834Z",
319 | "name": "Item_39"
320 | }
321 | },
322 | {
323 | "pk": 41,
324 | "model": "dummyapp.dummymodel",
325 | "fields": {
326 | "date_created": "2012-08-07T08:23:24.836Z",
327 | "name": "Item_40"
328 | }
329 | },
330 | {
331 | "pk": 42,
332 | "model": "dummyapp.dummymodel",
333 | "fields": {
334 | "date_created": "2012-08-07T08:23:24.839Z",
335 | "name": "Item_41"
336 | }
337 | },
338 | {
339 | "pk": 43,
340 | "model": "dummyapp.dummymodel",
341 | "fields": {
342 | "date_created": "2012-08-07T08:23:24.841Z",
343 | "name": "Item_42"
344 | }
345 | },
346 | {
347 | "pk": 44,
348 | "model": "dummyapp.dummymodel",
349 | "fields": {
350 | "date_created": "2012-08-07T08:23:24.844Z",
351 | "name": "Item_43"
352 | }
353 | },
354 | {
355 | "pk": 45,
356 | "model": "dummyapp.dummymodel",
357 | "fields": {
358 | "date_created": "2012-08-07T08:23:24.846Z",
359 | "name": "Item_44"
360 | }
361 | },
362 | {
363 | "pk": 46,
364 | "model": "dummyapp.dummymodel",
365 | "fields": {
366 | "date_created": "2012-08-07T08:23:24.849Z",
367 | "name": "Item_45"
368 | }
369 | },
370 | {
371 | "pk": 47,
372 | "model": "dummyapp.dummymodel",
373 | "fields": {
374 | "date_created": "2012-08-07T08:23:24.852Z",
375 | "name": "Item_46"
376 | }
377 | },
378 | {
379 | "pk": 48,
380 | "model": "dummyapp.dummymodel",
381 | "fields": {
382 | "date_created": "2012-08-07T08:23:24.854Z",
383 | "name": "Item_47"
384 | }
385 | },
386 | {
387 | "pk": 49,
388 | "model": "dummyapp.dummymodel",
389 | "fields": {
390 | "date_created": "2012-08-07T08:23:24.856Z",
391 | "name": "Item_48"
392 | }
393 | },
394 | {
395 | "pk": 50,
396 | "model": "dummyapp.dummymodel",
397 | "fields": {
398 | "date_created": "2012-08-07T08:23:24.858Z",
399 | "name": "Item_49"
400 | }
401 | },
402 | {
403 | "pk": 51,
404 | "model": "dummyapp.dummymodel",
405 | "fields": {
406 | "date_created": "2012-08-07T08:23:24.861Z",
407 | "name": "Item_50"
408 | }
409 | },
410 | {
411 | "pk": 52,
412 | "model": "dummyapp.dummymodel",
413 | "fields": {
414 | "date_created": "2012-08-07T08:23:24.864Z",
415 | "name": "Item_51"
416 | }
417 | },
418 | {
419 | "pk": 53,
420 | "model": "dummyapp.dummymodel",
421 | "fields": {
422 | "date_created": "2012-08-07T08:23:24.866Z",
423 | "name": "Item_52"
424 | }
425 | },
426 | {
427 | "pk": 54,
428 | "model": "dummyapp.dummymodel",
429 | "fields": {
430 | "date_created": "2012-08-07T08:23:24.869Z",
431 | "name": "Item_53"
432 | }
433 | },
434 | {
435 | "pk": 55,
436 | "model": "dummyapp.dummymodel",
437 | "fields": {
438 | "date_created": "2012-08-07T08:23:24.871Z",
439 | "name": "Item_54"
440 | }
441 | },
442 | {
443 | "pk": 56,
444 | "model": "dummyapp.dummymodel",
445 | "fields": {
446 | "date_created": "2012-08-07T08:23:24.874Z",
447 | "name": "Item_55"
448 | }
449 | },
450 | {
451 | "pk": 57,
452 | "model": "dummyapp.dummymodel",
453 | "fields": {
454 | "date_created": "2012-08-07T08:23:24.876Z",
455 | "name": "Item_56"
456 | }
457 | },
458 | {
459 | "pk": 58,
460 | "model": "dummyapp.dummymodel",
461 | "fields": {
462 | "date_created": "2012-08-07T08:23:24.879Z",
463 | "name": "Item_57"
464 | }
465 | },
466 | {
467 | "pk": 59,
468 | "model": "dummyapp.dummymodel",
469 | "fields": {
470 | "date_created": "2012-08-07T08:23:24.881Z",
471 | "name": "Item_58"
472 | }
473 | },
474 | {
475 | "pk": 60,
476 | "model": "dummyapp.dummymodel",
477 | "fields": {
478 | "date_created": "2012-08-07T08:23:24.883Z",
479 | "name": "Item_59"
480 | }
481 | },
482 | {
483 | "pk": 61,
484 | "model": "dummyapp.dummymodel",
485 | "fields": {
486 | "date_created": "2012-08-07T08:23:24.886Z",
487 | "name": "Item_60"
488 | }
489 | },
490 | {
491 | "pk": 62,
492 | "model": "dummyapp.dummymodel",
493 | "fields": {
494 | "date_created": "2012-08-07T08:23:24.888Z",
495 | "name": "Item_61"
496 | }
497 | },
498 | {
499 | "pk": 63,
500 | "model": "dummyapp.dummymodel",
501 | "fields": {
502 | "date_created": "2012-08-07T08:23:24.891Z",
503 | "name": "Item_62"
504 | }
505 | },
506 | {
507 | "pk": 64,
508 | "model": "dummyapp.dummymodel",
509 | "fields": {
510 | "date_created": "2012-08-07T08:23:24.894Z",
511 | "name": "Item_63"
512 | }
513 | },
514 | {
515 | "pk": 65,
516 | "model": "dummyapp.dummymodel",
517 | "fields": {
518 | "date_created": "2012-08-07T08:23:24.896Z",
519 | "name": "Item_64"
520 | }
521 | },
522 | {
523 | "pk": 66,
524 | "model": "dummyapp.dummymodel",
525 | "fields": {
526 | "date_created": "2012-08-07T08:23:24.898Z",
527 | "name": "Item_65"
528 | }
529 | },
530 | {
531 | "pk": 67,
532 | "model": "dummyapp.dummymodel",
533 | "fields": {
534 | "date_created": "2012-08-07T08:23:24.901Z",
535 | "name": "Item_66"
536 | }
537 | },
538 | {
539 | "pk": 68,
540 | "model": "dummyapp.dummymodel",
541 | "fields": {
542 | "date_created": "2012-08-07T08:23:24.904Z",
543 | "name": "Item_67"
544 | }
545 | },
546 | {
547 | "pk": 69,
548 | "model": "dummyapp.dummymodel",
549 | "fields": {
550 | "date_created": "2012-08-07T08:23:24.907Z",
551 | "name": "Item_68"
552 | }
553 | },
554 | {
555 | "pk": 70,
556 | "model": "dummyapp.dummymodel",
557 | "fields": {
558 | "date_created": "2012-08-07T08:23:24.909Z",
559 | "name": "Item_69"
560 | }
561 | },
562 | {
563 | "pk": 71,
564 | "model": "dummyapp.dummymodel",
565 | "fields": {
566 | "date_created": "2012-08-07T08:23:24.912Z",
567 | "name": "Item_70"
568 | }
569 | },
570 | {
571 | "pk": 72,
572 | "model": "dummyapp.dummymodel",
573 | "fields": {
574 | "date_created": "2012-08-07T08:23:24.914Z",
575 | "name": "Item_71"
576 | }
577 | },
578 | {
579 | "pk": 73,
580 | "model": "dummyapp.dummymodel",
581 | "fields": {
582 | "date_created": "2012-08-07T08:23:24.917Z",
583 | "name": "Item_72"
584 | }
585 | },
586 | {
587 | "pk": 74,
588 | "model": "dummyapp.dummymodel",
589 | "fields": {
590 | "date_created": "2012-08-07T08:23:24.920Z",
591 | "name": "Item_73"
592 | }
593 | },
594 | {
595 | "pk": 75,
596 | "model": "dummyapp.dummymodel",
597 | "fields": {
598 | "date_created": "2012-08-07T08:23:24.922Z",
599 | "name": "Item_74"
600 | }
601 | },
602 | {
603 | "pk": 76,
604 | "model": "dummyapp.dummymodel",
605 | "fields": {
606 | "date_created": "2012-08-07T08:23:24.924Z",
607 | "name": "Item_75"
608 | }
609 | },
610 | {
611 | "pk": 77,
612 | "model": "dummyapp.dummymodel",
613 | "fields": {
614 | "date_created": "2012-08-07T08:23:24.927Z",
615 | "name": "Item_76"
616 | }
617 | },
618 | {
619 | "pk": 78,
620 | "model": "dummyapp.dummymodel",
621 | "fields": {
622 | "date_created": "2012-08-07T08:23:24.929Z",
623 | "name": "Item_77"
624 | }
625 | },
626 | {
627 | "pk": 79,
628 | "model": "dummyapp.dummymodel",
629 | "fields": {
630 | "date_created": "2012-08-07T08:23:24.931Z",
631 | "name": "Item_78"
632 | }
633 | },
634 | {
635 | "pk": 80,
636 | "model": "dummyapp.dummymodel",
637 | "fields": {
638 | "date_created": "2012-08-07T08:23:24.934Z",
639 | "name": "Item_79"
640 | }
641 | },
642 | {
643 | "pk": 81,
644 | "model": "dummyapp.dummymodel",
645 | "fields": {
646 | "date_created": "2012-08-07T08:23:24.936Z",
647 | "name": "Item_80"
648 | }
649 | },
650 | {
651 | "pk": 82,
652 | "model": "dummyapp.dummymodel",
653 | "fields": {
654 | "date_created": "2012-08-07T08:23:24.939Z",
655 | "name": "Item_81"
656 | }
657 | },
658 | {
659 | "pk": 83,
660 | "model": "dummyapp.dummymodel",
661 | "fields": {
662 | "date_created": "2012-08-07T08:23:24.942Z",
663 | "name": "Item_82"
664 | }
665 | },
666 | {
667 | "pk": 84,
668 | "model": "dummyapp.dummymodel",
669 | "fields": {
670 | "date_created": "2012-08-07T08:23:24.944Z",
671 | "name": "Item_83"
672 | }
673 | },
674 | {
675 | "pk": 85,
676 | "model": "dummyapp.dummymodel",
677 | "fields": {
678 | "date_created": "2012-08-07T08:23:24.947Z",
679 | "name": "Item_84"
680 | }
681 | },
682 | {
683 | "pk": 86,
684 | "model": "dummyapp.dummymodel",
685 | "fields": {
686 | "date_created": "2012-08-07T08:23:24.949Z",
687 | "name": "Item_85"
688 | }
689 | },
690 | {
691 | "pk": 87,
692 | "model": "dummyapp.dummymodel",
693 | "fields": {
694 | "date_created": "2012-08-07T08:23:24.951Z",
695 | "name": "Item_86"
696 | }
697 | },
698 | {
699 | "pk": 88,
700 | "model": "dummyapp.dummymodel",
701 | "fields": {
702 | "date_created": "2012-08-07T08:23:24.954Z",
703 | "name": "Item_87"
704 | }
705 | },
706 | {
707 | "pk": 89,
708 | "model": "dummyapp.dummymodel",
709 | "fields": {
710 | "date_created": "2012-08-07T08:23:24.956Z",
711 | "name": "Item_88"
712 | }
713 | },
714 | {
715 | "pk": 90,
716 | "model": "dummyapp.dummymodel",
717 | "fields": {
718 | "date_created": "2012-08-07T08:23:24.959Z",
719 | "name": "Item_89"
720 | }
721 | },
722 | {
723 | "pk": 91,
724 | "model": "dummyapp.dummymodel",
725 | "fields": {
726 | "date_created": "2012-08-07T08:23:24.961Z",
727 | "name": "Item_90"
728 | }
729 | },
730 | {
731 | "pk": 92,
732 | "model": "dummyapp.dummymodel",
733 | "fields": {
734 | "date_created": "2012-08-07T08:23:24.963Z",
735 | "name": "Item_91"
736 | }
737 | },
738 | {
739 | "pk": 93,
740 | "model": "dummyapp.dummymodel",
741 | "fields": {
742 | "date_created": "2012-08-07T08:23:24.966Z",
743 | "name": "Item_92"
744 | }
745 | },
746 | {
747 | "pk": 94,
748 | "model": "dummyapp.dummymodel",
749 | "fields": {
750 | "date_created": "2012-08-07T08:23:24.968Z",
751 | "name": "Item_93"
752 | }
753 | },
754 | {
755 | "pk": 95,
756 | "model": "dummyapp.dummymodel",
757 | "fields": {
758 | "date_created": "2012-08-07T08:23:24.971Z",
759 | "name": "Item_94"
760 | }
761 | },
762 | {
763 | "pk": 96,
764 | "model": "dummyapp.dummymodel",
765 | "fields": {
766 | "date_created": "2012-08-07T08:23:24.973Z",
767 | "name": "Item_95"
768 | }
769 | },
770 | {
771 | "pk": 97,
772 | "model": "dummyapp.dummymodel",
773 | "fields": {
774 | "date_created": "2012-08-07T08:23:24.976Z",
775 | "name": "Item_96"
776 | }
777 | },
778 | {
779 | "pk": 98,
780 | "model": "dummyapp.dummymodel",
781 | "fields": {
782 | "date_created": "2012-08-07T08:23:24.979Z",
783 | "name": "Item_97"
784 | }
785 | },
786 | {
787 | "pk": 99,
788 | "model": "dummyapp.dummymodel",
789 | "fields": {
790 | "date_created": "2012-08-07T08:23:24.981Z",
791 | "name": "Item_98"
792 | }
793 | },
794 | {
795 | "pk": 100,
796 | "model": "dummyapp.dummymodel",
797 | "fields": {
798 | "date_created": "2012-08-07T08:23:24.983Z",
799 | "name": "Item_99"
800 | }
801 | }
802 | ]
--------------------------------------------------------------------------------