├── .gitignore ├── LICENSE ├── MANIFEST.in ├── README.rst ├── cachebot ├── __init__.py ├── admin.py ├── backends │ ├── __init__.py │ ├── dummy.py │ └── memcached.py ├── conf.py ├── logger.py ├── management │ ├── __init__.py │ └── commands │ │ ├── __init__.py │ │ └── flush_cache.py ├── managers.py ├── models.py ├── monkey.py ├── queryset.py ├── signals.py ├── test_models.py ├── tests │ ├── __init__.py │ ├── base_tests.py │ ├── manager_tests.py │ ├── many_to_many_tests.py │ ├── no_cache_tests.py │ ├── reverse_lookup_tests.py │ └── values_tests.py └── utils.py └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.tar 2 | *.tar.gz 3 | *.tar.bz2 4 | *.pyc 5 | *.pyo 6 | .DS_Store 7 | *.egg-info 8 | 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010, David Ziegler 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | * Neither the name django-cachebot nor the names of its contributors 13 | may be used to endorse or promote products derived from this software without 14 | specific prior written permission. 15 | * If you meet the author(s) some day, and you think this stuff is worth it, 16 | you can buy the author(s) a beer in return. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.rst -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Django-cachebot 2 | ================= 3 | 4 | Django-cachebot provides automated caching and invalidation for the Django ORM. 5 | 6 | Installation 7 | ************ 8 | 9 | 1. ``easy_install django-cachebot`` or ``pip install django-cachebot`` 10 | 11 | 2. Add ``cachebot`` to your ``INSTALLED_APPS`` 12 | 13 | 3. 
Set a cache backend to one of the backends in ``cachebot.backends``, for instance:: 14 | 15 | CACHES = { 16 | 'default': { 17 | 'BACKEND': 'cachebot.backends.memcached.MemcachedCache', 18 | 'LOCATION': '127.0.0.1:11211', 19 | } 20 | } 21 | 22 | Currently supported backends are:: 23 | 24 | cachebot.backends.dummy.DummyCache 25 | cachebot.backends.memcached.MemcachedCache 26 | cachebot.backends.memcached.PyLibMCCache 27 | 28 | 29 | 4. If you want to add caching to a model, the model's manager needs to be ``CacheBotManager`` or a subclass of it, e.g.:: 30 | 31 | from django.db import models 32 | from cachebot.managers import CacheBotManager 33 | 34 | class Author(models.Model): 35 | name = models.CharField(max_length=50) 36 | objects = CacheBotManager() 37 | 38 | class BookManager(CacheBotManager): 39 | 40 | def for_author(self, name): 41 | return self.filter(author__name=name) 42 | 43 | class Book(models.Model): 44 | title = models.CharField(max_length=50) 45 | author = models.ForeignKey(Author) 46 | objects = BookManager() 47 | 48 | Usage 49 | ****** 50 | 51 | By default, all ``get`` queries for ``CacheBotManager`` will be cached:: 52 | 53 | photo = Photo.objects.get(user=user) 54 | 55 | If you don't want this behavior, call ``CacheBotManager(cache_get=False)`` when defining the manager, or set ``CACHEBOT_CACHE_GET = False`` in your settings to change this globally. 56 | 57 | ------------ 58 | 59 | For more complex queries, suppose you have a query like this that you want to cache:: 60 | 61 | Photo.objects.filter(user=user, status=2) 62 | 63 | Just add ``.cache()`` to the queryset chain like so:: 64 | 65 | Photo.objects.cache().filter(user=user, status=2) 66 | 67 | This query will get invalidated if any of the following conditions are met:: 68 | 69 | 1. One of the objects returned by the query is altered. 70 | 2. The user is altered. 71 | 3. A Photo is modified and has status = 2. 72 | 4. A Photo is modified and has user = user. 73 | 74 | These invalidation criteria are probably too cautious, because we don't want to invalidate this cache every time a Photo with ``status = 2`` is saved. To fine-tune the invalidation criteria, we can restrict invalidation to certain fields. For example:: 75 | 76 | Photo.objects.cache('user').filter(user=user, status=2) 77 | 78 | This query will get invalidated if any of the following conditions are met:: 79 | 80 | 1. One of the objects returned by the query is altered. 81 | 2. The user is altered. 82 | 3. A Photo is modified and has user = user. 83 | 84 | 85 | django-cachebot can also handle select_related, forward relations, and reverse relations, i.e.:: 86 | 87 | Photo.objects.select_related().cache('user').filter(user__username="david", status=2) 88 | 89 | Photo.objects.cache('user').filter(user__username="david", status=2) 90 | 91 | Photo.objects.cache('message__sender').filter(message__sender=user, status=2) 92 | 93 | 94 | Settings 95 | ******** 96 | 97 | - ``CACHEBOT_CACHE_GET`` 98 | 99 | - default: ``True`` 100 | - If set to ``True``, ``CacheBotManager`` will be called with ``cache_get=True`` by default. 101 | 102 | - ``CACHEBOT_TABLE_BLACKLIST`` 103 | 104 | - default: ``('django_session', 'django_content_type', 'south_migrationhistory')`` 105 | - A list of tables that cachebot should ignore. 106 | 107 | 108 | Caveats (Important!) 109 | ******************** 110 | 111 | 1. Adding/Removing objects with a ManyRelatedManager will not automatically invalidate the cache. 
You'll need to manually invalidate these queries like so:: 112 | 113 | from cachebot.signals import invalidate_object 114 | 115 | user.friends.add(friend) 116 | invalidate_object(user) 117 | invalidate_object(friend) 118 | 119 | 2. ``count()`` queries will not get cached. 120 | 121 | 3. If you're invalidating on a field that is in a range or exclude query, these queries will get invalidated when anything in the table changes. For example the following would get invalidated when anything on the User table changed:: 122 | 123 | Photo.objects.cache('user').filter(user__in=users, status=2) 124 | 125 | Photo.objects.cache('user').exclude(user=user, status=2) 126 | 127 | 128 | 4. You should probably use a tool like django-memcache-status_ to check on the status of your cache. If memcache overfills and starts dropping keys, it's possible that your queries might not get invalidated. 129 | 130 | 5. .values_list() doesn't cache yet. You should do something like this instead:: 131 | 132 | [photo['id'] for photo in Photo.objects.cache('user').filter(user=user).values('id')] 133 | 134 | 135 | .. _django-memcache-status: http://github.com/bartTC/django-memcache-status 136 | 137 | Dependencies 138 | ************* 139 | 140 | * Django 1.3 141 | 142 | If you use Django 1.2, you can use django-cachebot version 0.3.1 143 | 144 | -------------------------------------------------------------------------------- /cachebot/__init__.py: -------------------------------------------------------------------------------- 1 | VERSION = (0, 4, 1) 2 | __version__ = '.'.join(map(str, VERSION)) 3 | 4 | -------------------------------------------------------------------------------- /cachebot/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | from cachebot.models import CacheBotSignals 3 | 4 | admin.site.register(CacheBotSignals) 5 | -------------------------------------------------------------------------------- /cachebot/backends/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dziegler/django-cachebot/2f213e18d5e40488b328b167bef0bfbbabe00d51/cachebot/backends/__init__.py -------------------------------------------------------------------------------- /cachebot/backends/dummy.py: -------------------------------------------------------------------------------- 1 | "Dummy cache backend" 2 | 3 | from django.core.cache.backends import dummy 4 | 5 | from cachebot.logger import CacheLogDecorator 6 | 7 | @CacheLogDecorator 8 | class DummyCache(dummy.DummyCache): 9 | 10 | def append(self, **kwargs): 11 | pass 12 | 13 | def prepend(self, **kwargs): 14 | pass 15 | 16 | def replace(self, **kwargs): 17 | pass 18 | 19 | def smart_incr(self, **kwargs): 20 | pass 21 | 22 | def smart_decr(self, **kwargs): 23 | pass -------------------------------------------------------------------------------- /cachebot/backends/memcached.py: -------------------------------------------------------------------------------- 1 | from threading import local 2 | 3 | from django.core.cache.backends import memcached 4 | 5 | from cachebot.logger import CacheLogDecorator 6 | 7 | @CacheLogDecorator 8 | class BaseMemcachedCache(memcached.BaseMemcachedCache): 9 | 10 | def _get_memcache_timeout(self, timeout): 11 | if timeout is None: 12 | timeout = self.default_timeout 13 | return timeout 14 | 15 | def append(self, key, value, version=None): 16 | key = self.make_key(key, version=version) 17 | 
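# memcached's append/prepend only succeed on keys that already exist; cachebot relies on
# this in queryset.CacheBot.cache_results, which add()s an invalidation list first and
# then append()s further result keys onto it.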
self._cache.append(key, value) 18 | 19 | def prepend(self, key, value, version=None): 20 | key = self.make_key(key, version=version) 21 | self._cache.prepend(key, value) 22 | 23 | def smart_incr(self, key, delta=1, default=0, **kwargs): 24 | try: 25 | return self.incr(key, delta=delta) 26 | except ValueError: 27 | val = default + delta 28 | self.add(key, val, **kwargs) 29 | return val 30 | 31 | def smart_decr(self, key, delta=1, default=0, **kwargs): 32 | try: 33 | return self.decr(key, delta=delta) 34 | except ValueError: 35 | val = default - delta 36 | self.add(key, val, **kwargs) 37 | return val 38 | 39 | def replace(self, key, value, timeout=0, version=None): 40 | key = self.make_key(key, version=version) 41 | return self._cache.replace(key, value, self._get_memcache_timeout(timeout)) 42 | 43 | 44 | class MemcachedCache(BaseMemcachedCache): 45 | "An implementation of a cache binding using python-memcached" 46 | def __init__(self, server, params): 47 | import memcache 48 | super(MemcachedCache, self).__init__(server, params, 49 | library=memcache, 50 | value_not_found_exception=ValueError) 51 | 52 | class PyLibMCCache(BaseMemcachedCache): 53 | "An implementation of a cache binding using pylibmc" 54 | def __init__(self, server, params): 55 | import pylibmc 56 | self._local = local() 57 | super(PyLibMCCache, self).__init__(server, params, 58 | library=pylibmc, 59 | value_not_found_exception=pylibmc.NotFound) 60 | 61 | @property 62 | def _cache(self): 63 | # PylibMC uses cache options as the 'behaviors' attribute. 64 | # It also needs to use threadlocals, because some versions of 65 | # PylibMC don't play well with the GIL. 66 | client = getattr(self._local, 'client', None) 67 | if client: 68 | return client 69 | 70 | client = self._lib.Client(self._servers) 71 | if self._options: 72 | client.behaviors = self._options 73 | 74 | self._local.client = client 75 | 76 | return client -------------------------------------------------------------------------------- /cachebot/conf.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from django.conf import settings 4 | 5 | CACHE_SECONDS = getattr(settings, 'CACHE_SECONDS', 0) 6 | CACHEBOT_CACHE_GET = getattr(settings, 'CACHEBOT_CACHE_GET', True) 7 | CACHEBOT_CACHE_ALL = getattr(settings, 'CACHEBOT_CACHE_ALL', False) 8 | CACHEBOT_TABLE_BLACKLIST = getattr(settings, 'CACHEBOT_TABLE_BLACKLIST', ('django_session', 'django_content_type', 'south_migrationhistory')) 9 | CACHEBOT_ENABLE_LOG = getattr(settings, 'CACHEBOT_ENABLE_LOG', False) 10 | CACHEBOT_LOG = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'cachebot.log') 11 | CACHEBOT_DEBUG_RESULTS = getattr(settings, 'CACHEBOT_DEBUG_RESULTS', False) 12 | CACHE_INVALIDATION_TIMEOUT = getattr(settings, 'CACHE_INVALIDATION_TIMEOUT', 5) 13 | RUNNING_TESTS = getattr(settings, 'RUNNING_TESTS', False) 14 | if RUNNING_TESTS: 15 | CACHEBOT_DEBUG_RESULTS = True 16 | CACHE_INVALIDATION_TIMEOUT = 1 17 | -------------------------------------------------------------------------------- /cachebot/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import threading 3 | from time import time 4 | 5 | from django.template import Template, Context 6 | from django.utils.translation import ugettext as _ 7 | 8 | from cachebot import conf 9 | 10 | cachebot_log = logging.getLogger(__name__) 11 | 12 | LOG_FUNCS = ('append', 'prepend', 'replace', 'add', 'get', 'set', 'delete', 'get_many', 'incr', 'set_many', 
'delete_many') 13 | 14 | def CacheLogDecorator(klass): 15 | orig_init = klass.__init__ 16 | 17 | def __init__(self, *args, **kwargs): 18 | self._logger = CacheLogger() 19 | orig_init(self, *args, **kwargs) 20 | 21 | if conf.CACHEBOT_ENABLE_LOG: 22 | for func in LOG_FUNCS: 23 | setattr(klass, func, logged_func(getattr(klass, func))) 24 | 25 | klass.__init__ = __init__ 26 | return klass 27 | 28 | class CacheLogger(threading.local): 29 | 30 | def __init__(self): 31 | self.reset() 32 | 33 | def reset(self, **kwargs): 34 | self.log = [] 35 | 36 | 37 | class CacheLogInstance(object): 38 | 39 | def __init__(self, name, key, end, hit=None): 40 | self.name = name 41 | self.key = key 42 | self.time = end 43 | self.hit = hit 44 | 45 | def __repr__(self): 46 | return ' - '.join((self.name, str(self.key))) 47 | 48 | def logged_func(func): 49 | def inner(instance, key, *args, **kwargs): 50 | t = time() 51 | val = func(instance, key, *args, **kwargs) 52 | 53 | if conf.CACHEBOT_ENABLE_LOG: 54 | end = 1000 * (time() - t) 55 | hit = None 56 | if func.func_name == 'get': 57 | hit = val != None 58 | elif func.func_name == 'get_many': 59 | hit = bool(val) 60 | log = CacheLogInstance(func.func_name, key, end, hit=hit) 61 | instance._logger.log.append(log) 62 | cachebot_log.debug(str(log)) 63 | 64 | return val 65 | return inner 66 | 67 | try: 68 | from debug_toolbar.panels import DebugPanel 69 | 70 | class CachePanel(DebugPanel): 71 | 72 | name = 'Cache' 73 | has_content = True 74 | 75 | def nav_title(self): 76 | return _('Cache') 77 | 78 | def title(self): 79 | return _('Cache Queries') 80 | 81 | def nav_subtitle(self): 82 | from django.core.cache import cache 83 | # Aggregate stats. 84 | stats = {'hit': 0, 'miss': 0, 'time': 0} 85 | for log in cache._logger.log: 86 | if hasattr(log, 'hit'): 87 | stats[log.hit and 'hit' or 'miss'] += 1 88 | stats['time'] += log.time 89 | 90 | # No ngettext, too many combos! 91 | stats['time'] = round(stats['time'], 2) 92 | return _('%(hit)s hits, %(miss)s misses in %(time)sms') % stats 93 | 94 | def content(self): 95 | from django.core.cache import cache 96 | context = {'logs': cache._logger.log} 97 | return Template(template).render(Context(context)) 98 | 99 | def url(self): 100 | return '' 101 | 102 | def process_request(self, request): 103 | from django.core.cache import cache 104 | cache._logger.reset() 105 | 106 | 107 | 108 | template = """ 109 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | {% for log in logs %} 123 | {% if log.hit %} 124 | 125 | {% else %} 126 | 127 | {% endif %} 128 | 129 | 130 | 131 | 132 | {% endfor %} 133 | 134 |
{{ _('Time (ms)') }}{{ _('Method') }}{{ _('Key') }}
{{ log.time|floatformat:"2" }}{{ log.name }}{{ log.key }}
135 | """ 136 | except ImportError: 137 | pass 138 | -------------------------------------------------------------------------------- /cachebot/management/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dziegler/django-cachebot/2f213e18d5e40488b328b167bef0bfbbabe00d51/cachebot/management/__init__.py -------------------------------------------------------------------------------- /cachebot/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dziegler/django-cachebot/2f213e18d5e40488b328b167bef0bfbbabe00d51/cachebot/management/commands/__init__.py -------------------------------------------------------------------------------- /cachebot/management/commands/flush_cache.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | 4 | from django.core.management.base import BaseCommand 5 | from cachebot.utils import flush_cache 6 | 7 | class Command(BaseCommand): 8 | """ 9 | Empty the cache 10 | """ 11 | help = 'Empty the cache' 12 | 13 | def handle(self, *args, **options): 14 | 15 | flush_cache(hard=True) -------------------------------------------------------------------------------- /cachebot/managers.py: -------------------------------------------------------------------------------- 1 | from django.db.models import Manager 2 | 3 | from cachebot import conf 4 | from cachebot.queryset import CachedQuerySet 5 | 6 | class CacheBotManager(Manager): 7 | 8 | def __init__(self, cache_all=conf.CACHEBOT_CACHE_ALL, cache_get=conf.CACHEBOT_CACHE_GET, **kwargs): 9 | super(CacheBotManager, self).__init__(**kwargs) 10 | self.cache_all = cache_all 11 | if cache_all: 12 | self.cache_get = True 13 | else: 14 | self.cache_get = cache_get 15 | 16 | def get_query_set(self): 17 | qs = CachedQuerySet(self.model, using=self.db) 18 | if self.cache_all: 19 | return qs.cache() 20 | else: 21 | return qs 22 | 23 | def cache(self, *args): 24 | return self.get_query_set().cache(*args) 25 | 26 | def select_reverse(self, *args, **kwargs): 27 | return self.get_query_set().select_reverse(*args, **kwargs) 28 | -------------------------------------------------------------------------------- /cachebot/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from django import dispatch 3 | 4 | from cachebot import conf 5 | 6 | class CacheBotSignals(models.Model): 7 | table_name = models.CharField(max_length=100) 8 | accessor_path = models.CharField(max_length=100) 9 | lookup_type = models.CharField(max_length=20) 10 | exclude = models.BooleanField(default=False) 11 | 12 | class Meta: 13 | ordering = ('table_name','accessor_path','lookup_type','exclude') 14 | 15 | def __unicode__(self): 16 | return u".".join((self.table_name,self.accessor_path,self.lookup_type,str(self.exclude))) 17 | 18 | class CacheBotException(Exception): 19 | pass 20 | 21 | post_update = dispatch.Signal(providing_args=["sender", "queryset"]) 22 | 23 | if conf.CACHEBOT_ENABLE_LOG: 24 | from django.core.signals import request_finished 25 | from django.core.cache import cache 26 | 27 | request_finished.connect(cache._logger.reset) 28 | 29 | if conf.RUNNING_TESTS: 30 | from cachebot.test_models import * -------------------------------------------------------------------------------- /cachebot/monkey.py: 
-------------------------------------------------------------------------------- 1 | def patch_manager(): 2 | from django.db import models 3 | from cachebot.managers import CacheBotManager 4 | models.Manager = CacheBotManager 5 | 6 | def patch_queryset(): 7 | from django.db.models import query 8 | from cachebot.queryset import CachedQuerySet 9 | query.QuerySet = CachedQuerySet 10 | 11 | def patch_all(manager=True, queryset=True): 12 | if manager: 13 | patch_manager() 14 | if queryset: 15 | patch_queryset() 16 | -------------------------------------------------------------------------------- /cachebot/queryset.py: -------------------------------------------------------------------------------- 1 | from itertools import chain 2 | 3 | from django.core.cache import cache 4 | from django.core.exceptions import ImproperlyConfigured, FieldError 5 | from django.db import connection 6 | from django.db.models import get_models 7 | from django.db.models.query import QuerySet, ValuesQuerySet 8 | from django.db.models.fields.related import ForeignRelatedObjectsDescriptor, ReverseManyRelatedObjectsDescriptor, ManyRelatedObjectsDescriptor 9 | from django.db.models.sql.constants import LOOKUP_SEP 10 | from django.db.models.sql.where import WhereNode 11 | from django.utils.hashcompat import md5_constructor 12 | 13 | from cachebot import conf 14 | from cachebot.models import post_update 15 | from cachebot.signals import cache_signals 16 | from cachebot.utils import get_invalidation_key, get_values, set_value 17 | 18 | class CacheBot(object): 19 | 20 | def __init__(self, queryset, extra_args=''): 21 | # have to call clone for some reason 22 | self.queryset = queryset._clone() 23 | if isinstance(self.queryset, ValuesQuerySet): 24 | self.parent_class = ValuesQuerySet 25 | else: 26 | self.parent_class = QuerySet 27 | self.result_key = queryset.get_cache_key(extra_args) 28 | 29 | 30 | def __iter__(self): 31 | cache_query = getattr(self.queryset, '_cache_query', False) 32 | 33 | if cache_query: 34 | results = cache.get(self.result_key) 35 | if results is not None: 36 | for obj in results: 37 | if conf.CACHEBOT_DEBUG_RESULTS: 38 | set_value(obj, 'from_cache', True) 39 | yield obj 40 | raise StopIteration 41 | 42 | results = [] 43 | pk_name = self.queryset.model._meta.pk.name 44 | self.queryset._fill_select_reverse_cache() 45 | 46 | reversemapping_keys = self.queryset._reversemapping.keys() 47 | reversemapping_keys.sort() 48 | 49 | for obj in self.parent_class.iterator(self.queryset): 50 | for related_name in reversemapping_keys: 51 | reversemap = self.queryset._target_maps[related_name] 52 | related_split = related_name.split(LOOKUP_SEP) 53 | for related_obj, related_field in self._nested_select_reverse(obj, related_split): 54 | val = reversemap.get(get_values(related_obj, pk_name),[]) 55 | set_value(related_obj, related_field, val) 56 | 57 | if cache_query: 58 | results.append(obj) 59 | if conf.CACHEBOT_DEBUG_RESULTS: 60 | set_value(obj, 'from_cache', False) 61 | yield obj 62 | 63 | if cache_query: 64 | self.cache_results(results) 65 | 66 | def _nested_select_reverse(self, obj, related_split): 67 | related_field = related_split.pop(0) 68 | try: 69 | related_obj = getattr(obj, related_field) 70 | if hasattr(related_obj, '__iter__'): 71 | for related_obj_ in related_obj: 72 | for nested_obj, related_field in self._nested_select_reverse(related_obj_, related_split): 73 | yield nested_obj, related_field 74 | else: 75 | for nested_obj, related_field in self._nested_select_reverse(related_obj, related_split): 76 | 
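# re-yield the pairs produced by the recursive call so the leaf (object, field) pair
# bubbles up to the caller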
yield nested_obj, related_field 77 | except AttributeError: 78 | yield obj, related_field 79 | 80 | def _is_valid_flush_path(self, accessor_path): 81 | if not self.queryset._flush_fields: 82 | return True 83 | elif (accessor_path in self.queryset._flush_fields) or (accessor_path+'_id' in self.queryset._flush_fields): 84 | return True 85 | else: 86 | return False 87 | 88 | def _register_signal(self, model_class, accessor_path, lookup_type, negate, params): 89 | cache_signals.register(model_class, accessor_path, lookup_type, negate=negate) 90 | return get_invalidation_key( 91 | model_class._meta.db_table, 92 | accessor_path = accessor_path, 93 | lookup_type = lookup_type, 94 | negate = negate, 95 | value = params) 96 | 97 | def cache_results(self, results): 98 | """ 99 | Create invalidation signals for these results in the form of CacheBotSignals. 100 | A CacheBotSignal stores a model and it's accessor path to self.queryset.model. 101 | """ 102 | # cache the results 103 | invalidation_dict = {} 104 | if cache.add(self.result_key, results, conf.CACHE_SECONDS): 105 | 106 | invalidation_dict.update(dict([(key, self.result_key) for key in self.get_invalidation_keys(results)])) 107 | 108 | for child, negate in self.queryset._get_where_clause(self.queryset.query.where): 109 | constraint, lookup_type, value_annotation, params = child 110 | for model_class, accessor_path in self._get_join_paths(constraint.alias, constraint.col): 111 | if self._is_valid_flush_path(accessor_path): 112 | invalidation_key = self._register_signal(model_class, accessor_path, lookup_type, negate, params) 113 | invalidation_dict[invalidation_key] = self.result_key 114 | 115 | for join_tuple in self.queryset.query.join_map.keys(): 116 | if join_tuple[0] == model_class._meta.db_table and self._is_valid_flush_path(accessor_path): 117 | model_klass, m2m = self.queryset._get_model_class_from_table(join_tuple[1]) 118 | invalidation_key = self._register_signal(model_klass, join_tuple[3], lookup_type, negate, params) 119 | invalidation_dict[invalidation_key] = self.result_key 120 | 121 | # need to add and append to prevent race conditions 122 | # replace this with batch operations later 123 | for flush_key, flush_list in invalidation_dict.iteritems(): 124 | added = cache.add(flush_key, self.result_key, 0) 125 | if not added: 126 | cache.append(flush_key, ',%s' % self.result_key) 127 | 128 | def _get_join_paths(self, table_alias, accessor_path): 129 | model_class, m2m = self.queryset._get_model_class_from_table(table_alias) 130 | if m2m: 131 | accessor_path = model_class._meta.pk.attname 132 | 133 | yield model_class, accessor_path 134 | 135 | for join_tuple in self.queryset.query.join_map.keys(): 136 | if join_tuple[0] and join_tuple[1] == table_alias: 137 | for model_class, join_accessor_path in self._get_join_paths(join_tuple[0], join_tuple[2]): 138 | if join_accessor_path == model_class._meta.pk.attname: 139 | for attname, related in self.queryset._get_reverse_relations(model_class): 140 | join_accessor_path = attname 141 | yield model_class, LOOKUP_SEP.join((join_accessor_path, accessor_path)) 142 | elif join_accessor_path.split(LOOKUP_SEP)[-1] == 'id': 143 | accessor_path_split = join_accessor_path.split(LOOKUP_SEP) 144 | join_accessor_path = LOOKUP_SEP.join(accessor_path_split[:-1]) 145 | yield model_class, LOOKUP_SEP.join((join_accessor_path, accessor_path)) 146 | elif join_accessor_path.endswith('_id'): 147 | join_accessor_path = join_accessor_path[:-3] 148 | yield model_class, LOOKUP_SEP.join((join_accessor_path, 
accessor_path)) 149 | else: 150 | yield model_class, LOOKUP_SEP.join((join_accessor_path, accessor_path)) 151 | 152 | 153 | def get_invalidation_keys(self, results): 154 | """ 155 | Iterates through a list of results, and returns an invalidation key for each result. If the 156 | query spans multiple tables, also return invalidation keys of any related rows. 157 | """ 158 | related_fields = self.queryset._related_fields 159 | for obj in results: 160 | for field, model_class in related_fields.iteritems(): 161 | pk_name = model_class._meta.pk.attname 162 | cache_signals.register(model_class, pk_name, 'exact') 163 | for value in get_values(obj, field): 164 | invalidation_key = get_invalidation_key( 165 | model_class._meta.db_table, 166 | accessor_path = pk_name, 167 | value = value) 168 | yield invalidation_key 169 | 170 | 171 | class CachedQuerySetMixin(object): 172 | 173 | def get_cache_key(self, extra_args='', version=None): 174 | """Cache key used to identify this query""" 175 | query, params = self.query.get_compiler(using=self.db).as_sql() 176 | query_string = (query % params).strip().encode("utf-8") 177 | base_key = md5_constructor('.'.join((query_string, extra_args))).hexdigest() 178 | return cache.make_key('.'.join((self.model._meta.db_table, 'cachebot.results', base_key)), version=version) 179 | 180 | def _get_model_class_from_table(self, table): 181 | """Helper method that accepts a table name and returns the Django model class it belongs to""" 182 | try: 183 | model_class = [m for m in get_models() if connection.introspection.table_name_converter(m._meta.db_table) in map(connection.introspection.table_name_converter,[table])][0] 184 | m2m = False 185 | except IndexError: 186 | try: 187 | # this is a many to many field 188 | model_class = [f.rel.to for m in get_models() for f in m._meta.local_many_to_many if f.m2m_db_table() == table][0] 189 | m2m = True 190 | except IndexError: 191 | # this is an inner join 192 | table = self.query.alias_map[table][0] 193 | return self._get_model_class_from_table(table) 194 | return model_class, m2m 195 | 196 | @property 197 | def _related_fields(self): 198 | """Returns the primary key accessor name and model class for any table this query spans.""" 199 | model_class, m2m = self._get_model_class_from_table(self.model._meta.db_table) 200 | related_fields = { 201 | self.model._meta.pk.attname: model_class 202 | } 203 | for attname, model_class in self._get_related_models(self.model): 204 | related_fields[attname] = model_class 205 | return related_fields 206 | 207 | def _get_related_models(self, parent_model): 208 | """ 209 | A recursive function that looks at what tables this query spans, and 210 | finds that table's primary key accessor name and model class. 
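Yields (accessor_name, model_class) pairs, following foreign keys and reverse relations recursively.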
211 | """ 212 | related_models = set() 213 | rev_reversemapping = dict([(v,k) for k,v in self._reversemapping.iteritems()]) 214 | if rev_reversemapping: 215 | for attname, related in self._get_reverse_relations(parent_model): 216 | related_models.add((rev_reversemapping[attname], related.model)) 217 | 218 | for field in parent_model._meta.fields: 219 | if field.rel and field.rel.to._meta.db_table in self.query.tables and field.rel.to != parent_model: 220 | related_models.add((field.attname, field.rel.to)) 221 | 222 | for attname, model_class in related_models: 223 | yield attname, model_class 224 | if attname.endswith("_id"): 225 | attname = attname[:-3] 226 | for join_attname, model_klass in self._get_related_models(model_class): 227 | yield LOOKUP_SEP.join((attname,join_attname)), model_klass 228 | 229 | def _get_reverse_relations(self, model_class): 230 | for related in chain(model_class._meta.get_all_related_objects(), model_class._meta.get_all_related_many_to_many_objects()): 231 | if related.opts.db_table in self.query.tables and related.model != model_class: 232 | related_name = related.get_accessor_name() 233 | yield related_name, related 234 | if related.model != related.parent_model: 235 | for attname, join_related in self._get_reverse_relations(related.model): 236 | yield LOOKUP_SEP.join((related_name + '_cache', attname)), join_related 237 | 238 | def _base_clone(self, queryset, klass=None, setup=False, **kwargs): 239 | """ 240 | Clones a CachedQuerySet. If caching and this is a ValuesQuerySet, automatically add any 241 | related foreign relations to the select fields so we can invalidate this query. 242 | """ 243 | cache_query = kwargs.get('_cache_query', getattr(self, '_cache_query', False)) 244 | kwargs['_cache_query'] = cache_query 245 | if not hasattr(self, '_reversemapping'): 246 | self._reversemapping = {} 247 | 248 | if cache_query and isinstance(queryset, ValuesQuerySet): 249 | fields = kwargs.get('_fields', getattr(self,'_fields', ())) 250 | if fields: 251 | fields = list(fields) 252 | else: 253 | fields = [f.attname for f in self.model._meta.fields] 254 | 255 | for related_field in self._related_fields.keys(): 256 | if related_field not in fields and self._is_valid_field(related_field): 257 | fields.append(related_field) 258 | setup = True 259 | kwargs['_fields'] = tuple(fields) 260 | 261 | if cache_query: 262 | reversemapping = {} 263 | for attname, related in self._get_reverse_relations(self.model): 264 | reversemapping[attname + '_cache'] = attname 265 | kwargs['_reversemapping'] = reversemapping 266 | if isinstance(queryset, ValuesQuerySet): 267 | parent_class = ValuesQuerySet 268 | else: 269 | parent_class = QuerySet 270 | clone = parent_class._clone(self, klass=klass, setup=setup, **kwargs) 271 | if not hasattr(clone, '_cache_query'): 272 | clone._cache_query = getattr(self, '_cache_query', False) 273 | if not hasattr(clone, '_reversemapping'): 274 | clone._reversemapping = getattr(self, '_reversemapping', {}) 275 | if not hasattr(clone, '_target_maps'): 276 | clone._target_maps = getattr(self, '_target_maps', {}) 277 | if not hasattr(clone, '_flush_fields'): 278 | clone._flush_fields = getattr(self, '_flush_fields', ()) 279 | 280 | return clone 281 | 282 | def _is_valid_field(self, field, allow_m2m=True): 283 | """A hackish way to figure out if this is a field or reverse foreign relation""" 284 | try: 285 | self.query.setup_joins(field.split(LOOKUP_SEP), self.query.get_meta(), self.query.get_initial_alias(), False, allow_m2m, True) 286 | return True 287 | 
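# setup_joins raises FieldError when the accessor path cannot be resolved to a real field or relation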
except FieldError: 288 | return False 289 | 290 | def _get_select_reverse_model(self, model_class, lookup_args): 291 | model_arg = lookup_args.pop(0) 292 | try: 293 | descriptor = getattr(model_class, model_arg) 294 | except AttributeError: 295 | # for nested reverse relations 296 | descriptor = getattr(model_class, self._reversemapping[model_arg]) 297 | if lookup_args: 298 | if isinstance(descriptor, ForeignRelatedObjectsDescriptor): 299 | return self._get_select_reverse_model(descriptor.related.model, lookup_args) 300 | elif isinstance(descriptor, ReverseManyRelatedObjectsDescriptor): 301 | return self._get_select_reverse_model(descriptor.field.rel.to, lookup_args) 302 | elif isinstance(descriptor, ManyRelatedObjectsDescriptor): 303 | return self._get_select_reverse_model(descriptor.related.model, lookup_args) 304 | else: 305 | return model_class, model_arg 306 | 307 | def _fill_select_reverse_cache(self): 308 | reversemapping = getattr(self, '_reversemapping', {}) 309 | target_maps = {} 310 | if reversemapping: 311 | if isinstance(self, ValuesQuerySet): 312 | pk_name = self.model._meta.pk.name 313 | queryset = self._clone().values(pk_name) 314 | else: 315 | queryset = self._clone() 316 | 317 | # Need to clear any limits on this query because of http://code.djangoproject.com/ticket/10099 318 | queryset.query.clear_limits() 319 | 320 | # we need to iterate through these in a certain order 321 | reversemapping_keys = self._reversemapping.keys() 322 | reversemapping_keys.sort() 323 | 324 | for key in reversemapping_keys: 325 | target_map= {} 326 | val = self._reversemapping[key] 327 | 328 | model_class, model_arg = self._get_select_reverse_model(self.model, val.split(LOOKUP_SEP)) 329 | if hasattr(model_class, key): 330 | raise ImproperlyConfigured, "Model %s already has an attribute %s" % (model_class, key) 331 | 332 | descriptor = getattr(model_class, model_arg) 333 | if isinstance(descriptor, ForeignRelatedObjectsDescriptor): 334 | rel = descriptor.related 335 | related_queryset = rel.model.objects.filter(**{rel.field.name+'__in':queryset}).all() 336 | for item in related_queryset.iterator(): 337 | target_map.setdefault(getattr(item, rel.field.get_attname()), []).append(item) 338 | elif isinstance(descriptor, ReverseManyRelatedObjectsDescriptor): 339 | field = descriptor.field 340 | related_queryset = field.rel.to.objects.filter(**{field.rel.related_name +'__in':queryset}).all().extra( \ 341 | select={'main_id': field.m2m_db_table() + '.' + field.m2m_column_name()}) 342 | for item in related_queryset.iterator(): 343 | target_map.setdefault(getattr(item, 'main_id'), []).append(item) 344 | elif isinstance(descriptor, ManyRelatedObjectsDescriptor): 345 | rel = descriptor.related 346 | related_queryset = rel.model.objects.filter(**{rel.field.name +'__in':queryset}).all().extra( \ 347 | select={'main_id': rel.field.m2m_db_table() + '.' 
+ rel.field.m2m_column_name()}) 348 | for item in related_queryset.iterator(): 349 | target_map.setdefault(getattr(item, 'main_id'), []).append(item) 350 | else: 351 | raise ImproperlyConfigured, "Unsupported mapping %s %s" % (val, descriptor) 352 | target_maps[key]=target_map 353 | self._target_maps = target_maps 354 | 355 | def _get_where_clause(self, node): 356 | for child in node.children: 357 | if isinstance(child, WhereNode): 358 | for child_node, negated in self._get_where_clause(child): 359 | yield child_node, negated 360 | else: 361 | yield child, node.negated 362 | 363 | def select_reverse(self, *reversemapping, **kwargs): 364 | """ 365 | Like select_related, but follows reverse and m2m foreign relations. Example usage: 366 | 367 | article_list = Article.objects.select_reverse('book_set') 368 | 369 | for article in article_list: 370 | # these will return the same queryset 371 | print article.book_set_cache 372 | print article.book_set.all() 373 | 374 | If there are N Articles belonging to K Books, this will return N + K results. The actual 375 | reversed book queryset would be cached in article_list._target_maps['book_set_cache'] 376 | 377 | Nested queries are also supported: 378 | 379 | article_list = Article.objects.select_reverse('book_set','book_set__publisher_set') 380 | 381 | for article in article_list: 382 | 383 | # these will return the same queryset 384 | for book in article.book_set_cache: 385 | print book.publisher_set_cache 386 | print book.publisher_set.all() 387 | 388 | # these will return the same queryset 389 | for book in article.book_set.all(): 390 | print book.publisher_set_cache 391 | print book.publisher_set.all() 392 | 393 | 394 | This could probably be better, because it does a SQL query for each reverse or m2m foreign 395 | relation in select_reverse, i.e. 396 | 397 | Article.objects.select_reverse('book_set','author_set') 398 | 399 | will be 3 SQL queries. This is a lot better than the alternative of a separate SQL query 400 | for each article in article_list, but it'd be nice to be able to do the whole thing in 1. 401 | 402 | Based off django-selectreverse: http://code.google.com/p/django-selectreverse/ 403 | """ 404 | _reversemapping = dict([(key +'_cache', key) for key in reversemapping]) 405 | return self._clone(_reversemapping=_reversemapping, **kwargs) 406 | 407 | def values(self, *fields): 408 | return self._clone(klass=CachedValuesQuerySet, setup=True, _fields=fields) 409 | 410 | def cache(self, *flush_fields): 411 | """ 412 | Cache this queryset. If this is a query over reverse foreign relations, those fields will automatically 413 | be added to select_reverse, because we need them for invalidation. 
Do not cache queries on 414 | tables in CACHEBOT_TABLE_BLACKLIST. 415 | """ 416 | _cache_query = self.model._meta.db_table not in conf.CACHEBOT_TABLE_BLACKLIST 417 | return self._clone(setup=True, _cache_query=_cache_query, _flush_fields=flush_fields) 418 | 419 | def get(self, *args, **kwargs): 420 | if self.model.objects.cache_get: 421 | return super(CachedQuerySetMixin, self.cache()).get(*args, **kwargs) 422 | else: 423 | return super(CachedQuerySetMixin, self).get(*args, **kwargs) 424 | 425 | 426 | class CachedQuerySet(CachedQuerySetMixin, QuerySet): 427 | 428 | def __init__(self, *args, **kwargs): 429 | super(CachedQuerySet, self).__init__(*args, **kwargs) 430 | self._reversemapping = {} 431 | 432 | def iterator(self): 433 | for obj in CacheBot(self): 434 | yield obj 435 | raise StopIteration 436 | 437 | def _clone(self, klass=None, setup=False, **kwargs): 438 | return self._base_clone(self, klass=klass, setup=setup, **kwargs) 439 | 440 | def update(self, **kwargs): 441 | post_update.send(sender=self.model, queryset=self) 442 | return super(CachedQuerySet, self).update(**kwargs) 443 | 444 | 445 | class CachedValuesQuerySet(CachedQuerySetMixin, ValuesQuerySet): 446 | 447 | def __init__(self, *args, **kwargs): 448 | super(CachedValuesQuerySet, self).__init__(*args, **kwargs) 449 | self._reversemapping = {} 450 | 451 | def iterator(self): 452 | for obj in CacheBot(self): 453 | yield obj 454 | raise StopIteration 455 | 456 | def _clone(self, klass=None, setup=False, **kwargs): 457 | return self._base_clone(self, klass=klass, setup=setup, **kwargs) 458 | 459 | def update(self, **kwargs): 460 | post_update.send(sender=self.model, queryset=self) 461 | return super(CachedValuesQuerySet, self).update(**kwargs) 462 | 463 | -------------------------------------------------------------------------------- /cachebot/signals.py: -------------------------------------------------------------------------------- 1 | from django.core.cache import cache 2 | from django.core.signals import request_finished, request_started 3 | from django.db.models.signals import post_save, pre_delete 4 | from django.utils.http import urlquote 5 | from django.utils.hashcompat import md5_constructor 6 | 7 | from cachebot import conf 8 | from cachebot.models import CacheBotSignals, post_update 9 | from cachebot.utils import get_invalidation_key, get_values 10 | 11 | if conf.CACHEBOT_ENABLE_LOG: 12 | request_finished.connect(cache._logger.reset) 13 | 14 | class CacheSignals(object): 15 | """ 16 | An object that handles installed cache signals. 
Keep a local copy of the signals 17 | so we don't hammer memcache 18 | """ 19 | 20 | __shared_state = dict( 21 | ready = False, 22 | local_signals = dict() 23 | ) 24 | 25 | def __init__(self): 26 | self.__dict__ = self.__shared_state 27 | 28 | def get_lookup_key(self, model_class, version=None): 29 | return cache.make_key('.'.join(('cachesignals', model_class._meta.db_table)), version=version) 30 | 31 | def get_local_signals(self, model_class): 32 | accessor_set = self.local_signals.get(model_class._meta.db_table) 33 | if not accessor_set: 34 | accessor_set = set() 35 | return accessor_set 36 | 37 | def get_global_signals(self, model_class): 38 | lookup_key = self.get_lookup_key(model_class) 39 | accessor_set = cache.get(lookup_key) 40 | if not accessor_set: 41 | accessor_set = set() 42 | self.local_signals[model_class._meta.db_table] = accessor_set 43 | return accessor_set 44 | 45 | def set_signals(self, model_class, accessor_set): 46 | lookup_key = self.get_lookup_key(model_class) 47 | self.local_signals[model_class._meta.db_table] = accessor_set 48 | cache.set(lookup_key, accessor_set, 0) 49 | 50 | def register(self, model_class, accessor_path, lookup_type, negate=False): 51 | path_tuple = (accessor_path, lookup_type, negate) 52 | if path_tuple not in self.get_local_signals(model_class): 53 | # not in local cache, check the global cache 54 | accessor_set = self.get_global_signals(model_class) 55 | if path_tuple not in accessor_set: 56 | # can't use get_or_create here 57 | try: 58 | CacheBotSignals.objects.filter( 59 | table_name=model_class._meta.db_table, 60 | accessor_path=accessor_path, 61 | lookup_type=lookup_type, 62 | exclude=negate 63 | )[0] 64 | except IndexError: 65 | CacheBotSignals.objects.create( 66 | table_name=model_class._meta.db_table, 67 | accessor_path=accessor_path, 68 | lookup_type=lookup_type, 69 | exclude=negate 70 | ) 71 | accessor_set.add(path_tuple) 72 | self.set_signals(model_class, accessor_set) 73 | 74 | cache_signals = CacheSignals() 75 | 76 | def load_cache_signals(version=None, **kwargs): 77 | """On startup, sync signals with registered models""" 78 | if not cache_signals.ready: 79 | results = CacheBotSignals.objects.all() 80 | tables = [r.table_name for r in results] 81 | mapping = cache.get_many(tables) 82 | for result in results: 83 | key = cache.make_key(u'.'.join(('cachesignals', result.table_name)), version=version) 84 | accessor_set = mapping.get(key) or set() 85 | accessor_set.add((result.accessor_path, result.lookup_type, result.exclude)) 86 | mapping[key] = accessor_set 87 | cache.set_many(mapping, 0) 88 | cache_signals.ready = True 89 | request_started.connect(load_cache_signals) 90 | 91 | 92 | ### INVALIDATION FUNCTIONS ### 93 | def post_update_cachebot(sender, queryset, **kwargs): 94 | invalidate_cache(sender, queryset) 95 | post_update.connect(post_update_cachebot) 96 | 97 | def post_save_cachebot(sender, instance, **kwargs): 98 | invalidate_cache(sender, (instance,)) 99 | post_save.connect(post_save_cachebot) 100 | 101 | def pre_delete_cachebot(sender, instance, **kwargs): 102 | invalidate_cache(sender, (instance,)) 103 | pre_delete.connect(pre_delete_cachebot) 104 | 105 | def invalidate_object(instance): 106 | invalidate_cache(type(instance), (instance,)) 107 | 108 | def invalidate_cache(model_class, objects, **extra_keys): 109 | """ 110 | Flushes the cache of any cached objects associated with this instance. 
111 | 112 | Explicitly set a None value instead of just deleting so we don't have any race 113 | conditions where: 114 | Thread 1 -> Cache miss, get object from DB 115 | Thread 2 -> Object saved, deleted from cache 116 | Thread 1 -> Store (stale) object fetched from DB in cache 117 | Five second should be more than enough time to prevent this from happening for 118 | a web app. 119 | """ 120 | invalidation_dict = {} 121 | accessor_set = cache_signals.get_global_signals(model_class) 122 | for obj in objects: 123 | for (accessor_path, lookup_type, negate) in accessor_set: 124 | if lookup_type != 'exact' or negate: 125 | invalidation_key = get_invalidation_key( 126 | model_class._meta.db_table, 127 | accessor_path = accessor_path, 128 | negate = negate, 129 | value = '') 130 | invalidation_dict[invalidation_key] = None 131 | else: 132 | for value in get_values(obj, accessor_path): 133 | invalidation_key = get_invalidation_key( 134 | model_class._meta.db_table, 135 | accessor_path = accessor_path, 136 | negate = negate, 137 | value = value) 138 | invalidation_dict[invalidation_key] = None 139 | 140 | if invalidation_dict: 141 | invalidation_dict.update(cache.get_many(invalidation_dict.keys())) 142 | 143 | cache_keys = set() 144 | for obj_key, cache_key_list in invalidation_dict.iteritems(): 145 | if cache_key_list: 146 | cache_keys.update(cache_key_list.split(',')) 147 | 148 | if cache_keys: 149 | cache.set_many(dict([(key, None) for key in cache_keys]), conf.CACHE_INVALIDATION_TIMEOUT) 150 | invalidation_dict.update(extra_keys) 151 | cache.delete_many(invalidation_dict.keys()) 152 | 153 | def invalidate_template_cache(fragment_name, *variables): 154 | args = md5_constructor(u':'.join(map(urlquote, variables)).encode('utf-8')).hexdigest() 155 | cache_key = 'template.cache.%s.%s' % (fragment_name, args) 156 | cache.delete(cache_key) 157 | 158 | 159 | -------------------------------------------------------------------------------- /cachebot/test_models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from django.contrib.contenttypes.models import ContentType 3 | from django.contrib.contenttypes import generic 4 | 5 | from cachebot.managers import CacheBotManager 6 | 7 | class UniqueModel(models.Model): 8 | text = models.CharField(max_length=50, unique=True) 9 | objects = CacheBotManager(cache_get=True) 10 | 11 | class NoCacheModel(models.Model): 12 | text = models.CharField(max_length=50) 13 | objects = CacheBotManager(cache_get=False) 14 | 15 | class FirstModel(models.Model): 16 | text = models.CharField(max_length=50) 17 | objects = CacheBotManager(cache_get=True) 18 | 19 | class SecondModel(models.Model): 20 | text = models.CharField(max_length=50) 21 | obj = models.ForeignKey(FirstModel) 22 | objects = CacheBotManager(cache_get=True) 23 | 24 | class ThirdModel(models.Model): 25 | text = models.CharField(max_length=50) 26 | obj = models.ForeignKey(SecondModel) 27 | objects = CacheBotManager(cache_get=True) 28 | 29 | class ManyModel(models.Model): 30 | text = models.CharField(max_length=50) 31 | firstmodel = models.ManyToManyField(FirstModel) 32 | thirdmodel = models.ManyToManyField(ThirdModel) 33 | objects = CacheBotManager(cache_get=True) 34 | 35 | class GenericModel(models.Model): 36 | text = models.CharField(max_length=50) 37 | content_type = models.ForeignKey(ContentType) 38 | object_id = models.PositiveIntegerField() 39 | obj = generic.GenericForeignKey('content_type', 'object_id') 40 | objects = 
CacheBotManager(cache_get=True) 41 | -------------------------------------------------------------------------------- /cachebot/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from cachebot.tests.base_tests import * 2 | from cachebot.tests.manager_tests import * 3 | from cachebot.tests.values_tests import * 4 | from cachebot.tests.reverse_lookup_tests import * 5 | from cachebot.tests.many_to_many_tests import * 6 | from cachebot.tests.no_cache_tests import * 7 | -------------------------------------------------------------------------------- /cachebot/tests/base_tests.py: -------------------------------------------------------------------------------- 1 | from django.contrib.contenttypes.models import ContentType 2 | from django.core.cache import cache 3 | from django.db.models.query import ValuesQuerySet 4 | from django.db.models import Q 5 | from django.test import TestCase 6 | 7 | from cachebot.models import FirstModel, SecondModel, ThirdModel, GenericModel, ManyModel 8 | from cachebot.utils import flush_cache 9 | 10 | class BaseTestCase(TestCase): 11 | 12 | def tearDown(self): 13 | super(BaseTestCase, self).tearDown() 14 | cache._logger.reset() 15 | 16 | def setUp(self): 17 | super(BaseTestCase, self).setUp() 18 | flush_cache(hard=False) 19 | 20 | class BasicCacheTests(BaseTestCase): 21 | 22 | def setUp(self): 23 | super(BasicCacheTests, self).setUp() 24 | self.append_cache = False 25 | self.firstmodel = FirstModel.objects.create(text="test1") 26 | self.secondmodel = SecondModel.objects.create(text="test2", obj=self.firstmodel) 27 | self.thirdmodel = ThirdModel.objects.create(text="test3", obj=self.secondmodel) 28 | ctype = ContentType.objects.get_for_model(self.secondmodel) 29 | self.genericmodel = GenericModel.objects.create(text="test4", content_type=ctype, object_id=self.secondmodel.id) 30 | self.manymodel = ManyModel.objects.create(text='test5') 31 | self.manymodel.firstmodel.add(self.firstmodel) 32 | self.manymodel.thirdmodel.add(self.thirdmodel) 33 | self.manager = ThirdModel.objects 34 | self.func = self.manager.cache().filter 35 | self.obj = self.thirdmodel 36 | self.kwargs = {'id':self.obj.id} 37 | 38 | def _test_cache_lookup(self, from_cache=False): 39 | try: 40 | if self.append_cache: 41 | results = self.func(**self.kwargs).cache() 42 | else: 43 | results = self.func(**self.kwargs) 44 | except (self.obj.DoesNotExist, self.obj.MultipleObjectsReturned): 45 | self.assertEqual(from_cache, False) 46 | return 47 | 48 | if isinstance(results, ValuesQuerySet): 49 | if hasattr(results,'__iter__'): 50 | for obj in results: 51 | self.assertEqual(obj['from_cache'], from_cache) 52 | else: 53 | self.assertEqual(results['from_cache'], from_cache) 54 | else: 55 | if hasattr(results,'__iter__'): 56 | for obj in results: 57 | self.assertEqual(obj.from_cache, from_cache) 58 | else: 59 | self.assertEqual(results.from_cache, from_cache) 60 | return results 61 | 62 | def _test_lookup(self): 63 | self._test_cache_lookup(from_cache=False) 64 | results = self._test_cache_lookup(from_cache=True) 65 | return results 66 | 67 | def test_lookup(self): 68 | self._test_lookup() 69 | 70 | def test_save_signal(self, obj=None): 71 | if obj is None: 72 | obj = self.obj 73 | self._test_lookup() 74 | obj.text = "jedi" 75 | obj.save() 76 | self._test_cache_lookup(from_cache=False) 77 | 78 | def test_delete_signal(self, obj=None): 79 | if obj is None: 80 | obj = self.obj 81 | self._test_lookup() 82 | obj.delete() 83 | 
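# the delete fires pre_delete, which invalidates the cached queryset, so this lookup must miss the cache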
self._test_cache_lookup(from_cache=False) 84 | 85 | def test_new_obj(self, obj=None, kwargs=None): 86 | if obj is None: 87 | obj = self.obj 88 | if kwargs is None: 89 | self.kwargs = {'text':obj.text} 90 | else: 91 | self.kwargs = kwargs 92 | self._test_lookup() 93 | new_obj = obj.__class__(text=obj.text) 94 | if hasattr(new_obj,'obj_id'): 95 | new_obj.obj = obj.obj 96 | if hasattr(new_obj,'firstmodel_id'): 97 | new_obj.firstmodel = obj.firstmodel 98 | if hasattr(new_obj,'secondmodel_id'): 99 | new_obj.secondmodel = obj.secondmodel 100 | if hasattr(new_obj,'content_type_id'): 101 | new_obj.content_type_id = obj.content_type_id 102 | new_obj.object_id = obj.object_id 103 | new_obj.save() 104 | self._test_cache_lookup(from_cache=False) 105 | 106 | 107 | class FieldCacheTests(BasicCacheTests): 108 | 109 | def setUp(self): 110 | BasicCacheTests.setUp(self) 111 | self.kwargs = {'text':self.obj.text} 112 | 113 | 114 | class GenericCacheTests(BasicCacheTests): 115 | 116 | def setUp(self): 117 | BasicCacheTests.setUp(self) 118 | self.manager = GenericModel.objects 119 | self.func = self.manager.cache().filter 120 | self.obj = self.genericmodel 121 | 122 | 123 | class RelatedCacheTests(BasicCacheTests): 124 | 125 | def setUp(self): 126 | BasicCacheTests.setUp(self) 127 | self.func = self.manager.cache().filter 128 | self.kwargs = {'obj':self.secondmodel} 129 | 130 | def test_related_save_signal(self): 131 | self.test_save_signal(obj=self.obj.obj) 132 | 133 | def test_related_delete_signal(self): 134 | self.test_delete_signal(obj=self.obj.obj) 135 | 136 | def test_related_new_obj(self): 137 | if hasattr(self.obj, 'obj'): 138 | kwargs = {'obj__text':self.obj.obj.text} 139 | self.test_new_obj(obj=self.obj.obj, kwargs=kwargs) 140 | 141 | 142 | class RelatedIDCacheTests(RelatedCacheTests): 143 | 144 | def setUp(self): 145 | RelatedCacheTests.setUp(self) 146 | self.kwargs = {'obj__id':self.secondmodel.id} 147 | 148 | 149 | class RelatedFieldCacheTests(RelatedCacheTests): 150 | 151 | def setUp(self): 152 | RelatedCacheTests.setUp(self) 153 | self.kwargs = {'obj__text':self.secondmodel.text} 154 | 155 | 156 | class ExtraRelatedCacheTests(RelatedCacheTests): 157 | 158 | def setUp(self): 159 | RelatedCacheTests.setUp(self) 160 | self.func = self.manager.cache().filter 161 | self.kwargs = {'obj__obj':self.firstmodel} 162 | 163 | def test_extra_related_save_signal(self): 164 | self.test_save_signal(obj=self.obj.obj.obj) 165 | 166 | def test_extra_related_delete_signal(self): 167 | self.test_delete_signal(obj=self.obj.obj.obj) 168 | 169 | def test_extra_related_new_obj(self): 170 | if hasattr(self.obj, 'obj') and hasattr(self.obj.obj, 'obj') : 171 | kwargs = {'obj__obj__text':self.obj.obj.obj.text} 172 | self.test_new_obj(obj=self.obj.obj.obj, kwargs=kwargs) 173 | 174 | 175 | class ExtraRelatedIDCacheTests(ExtraRelatedCacheTests): 176 | 177 | def setUp(self): 178 | ExtraRelatedCacheTests.setUp(self) 179 | self.kwargs = {'obj__obj__id':self.firstmodel.id} 180 | 181 | 182 | class ExtraRelatedFieldCacheTests(ExtraRelatedCacheTests): 183 | 184 | def setUp(self): 185 | ExtraRelatedCacheTests.setUp(self) 186 | self.kwargs = {'obj__obj__text':self.firstmodel.text} 187 | 188 | 189 | class ExtraRelatedAppendCacheTests(ExtraRelatedCacheTests): 190 | 191 | def setUp(self): 192 | ExtraRelatedCacheTests.setUp(self) 193 | self.append_cache = True 194 | 195 | 196 | class SelectiveCacheTests(ExtraRelatedCacheTests): 197 | 198 | def setUp(self): 199 | ExtraRelatedCacheTests.setUp(self) 200 | self.append_cache = True 201 | 
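# selective caching: only invalidate this queryset on changes reachable through 'obj__obj',
# mirroring the README's Photo.objects.cache('user') example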
self.func = self.manager.cache('obj__obj').filter 202 | 203 | 204 | class SelectiveCacheIDTests(ExtraRelatedCacheTests): 205 | 206 | def setUp(self): 207 | ExtraRelatedCacheTests.setUp(self) 208 | self.append_cache = True 209 | self.func = self.manager.cache('obj__obj_id').filter 210 | 211 | 212 | class ComplexQueryCacheTests(ExtraRelatedCacheTests): 213 | 214 | def setUp(self): 215 | ExtraRelatedCacheTests.setUp(self) 216 | 217 | def _test_cache_lookup(self, from_cache=False): 218 | try: 219 | if self.append_cache: 220 | results = self.func(Q(obj__obj__id=self.firstmodel.id)|Q(obj__obj__text='blah blah blah')).cache() 221 | else: 222 | results = self.func(Q(obj__obj__id=self.firstmodel.id)|Q(obj__obj__text='blah blah blah')) 223 | except (self.obj.DoesNotExist, self.obj.MultipleObjectsReturned): 224 | self.assertEqual(from_cache, False) 225 | return 226 | 227 | if isinstance(results, ValuesQuerySet): 228 | if hasattr(results,'__iter__'): 229 | for obj in results: 230 | self.assertEqual(obj['from_cache'], from_cache) 231 | else: 232 | self.assertEqual(results['from_cache'], from_cache) 233 | else: 234 | if hasattr(results,'__iter__'): 235 | for obj in results: 236 | self.assertEqual(obj.from_cache, from_cache) 237 | else: 238 | self.assertEqual(results.from_cache, from_cache) 239 | return results 240 | 241 | def _test_lookup(self): 242 | self._test_cache_lookup(from_cache=False) 243 | results = self._test_cache_lookup(from_cache=True) 244 | return results 245 | 246 | def test_lookup(self): 247 | self._test_lookup() 248 | 249 | def test_extra_related_new_obj(self): 250 | pass 251 | -------------------------------------------------------------------------------- /cachebot/tests/manager_tests.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from django.db import connection 4 | from django.conf import settings 5 | 6 | from cachebot import conf 7 | from cachebot.models import FirstModel 8 | from cachebot.tests.base_tests import BaseTestCase, BasicCacheTests, FieldCacheTests, RelatedCacheTests, ExtraRelatedCacheTests 9 | 10 | class GetBasicCacheTests(BasicCacheTests): 11 | 12 | def setUp(self): 13 | BasicCacheTests.setUp(self) 14 | self.func = self.manager.get 15 | 16 | 17 | class GetRelatedCacheTests(RelatedCacheTests): 18 | 19 | def setUp(self): 20 | RelatedCacheTests.setUp(self) 21 | self.func = self.manager.get 22 | 23 | 24 | class GetExtraRelatedCacheTests(ExtraRelatedCacheTests): 25 | 26 | def setUp(self): 27 | ExtraRelatedCacheTests.setUp(self) 28 | self.func = self.manager.get 29 | 30 | 31 | class GetOrCreateCacheTests(BaseTestCase): 32 | 33 | def test_get_then_create(self): 34 | self.assertRaises(FirstModel.DoesNotExist, FirstModel.objects.get, **{'text':'new'}) 35 | FirstModel.objects.create(text='new') 36 | time.sleep(conf.CACHE_INVALIDATION_TIMEOUT) 37 | obj = FirstModel.objects.get(text='new') 38 | self.assertEqual(obj.from_cache,False) 39 | obj = FirstModel.objects.get(text='new') 40 | self.assertEqual(obj.from_cache,True) 41 | 42 | def test_get_or_create(self): 43 | obj, created = FirstModel.objects.get_or_create(text='new') 44 | self.assertEqual(created, True) 45 | time.sleep(conf.CACHE_INVALIDATION_TIMEOUT) 46 | obj = FirstModel.objects.get(text='new') 47 | self.assertEqual(obj.from_cache,False) 48 | obj = FirstModel.objects.get(text='new') 49 | self.assertEqual(obj.from_cache,True) 50 | 51 | class SelectRelatedCacheTests(ExtraRelatedCacheTests): 52 | 53 | def setUp(self): 54 | ExtraRelatedCacheTests.setUp(self) 55 | 
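# select_related() chains with cache(); every table the query spans contributes its own invalidation keys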
self.func = self.manager.select_related().cache().filter 56 | self.obj = self.thirdmodel 57 | self.kwargs = {'id':self.obj.id} 58 | 59 | class ExcludeCacheTests(BasicCacheTests): 60 | 61 | def setUp(self): 62 | BasicCacheTests.setUp(self) 63 | self.obj = self.thirdmodel 64 | self.kwargs = {'id':self.obj.id+1} 65 | self.func = self.manager.cache().exclude 66 | 67 | 68 | class ExcludeFieldCacheTests(FieldCacheTests): 69 | 70 | def setUp(self): 71 | FieldCacheTests.setUp(self) 72 | self.kwargs = {'text':'this text is not in any model'} 73 | self.func = self.manager.cache().exclude 74 | 75 | 76 | class ExtraRelatedExcludeCacheTests(ExtraRelatedCacheTests): 77 | 78 | def setUp(self): 79 | ExtraRelatedCacheTests.setUp(self) 80 | self.kwargs = {'obj__obj':self.obj.obj.obj.id+1} 81 | self.func = self.manager.cache().exclude 82 | 83 | 84 | class ExcludeAndFilterCacheTests(BasicCacheTests): 85 | 86 | def setUp(self): 87 | BasicCacheTests.setUp(self) 88 | self.obj = self.thirdmodel 89 | self.kwargs = {'id':self.obj.id+1} 90 | self.func = self.manager.cache().filter(id=self.obj.id).exclude 91 | 92 | 93 | class ExcludeAndFilterFieldCacheTests(FieldCacheTests): 94 | 95 | def setUp(self): 96 | FieldCacheTests.setUp(self) 97 | self.kwargs = {'text':'this text is not in any model'} 98 | self.func = self.manager.cache().filter(text=self.obj.text).exclude 99 | 100 | 101 | class ExtraRelatedExcludeAndFilterCacheTests(ExtraRelatedCacheTests): 102 | 103 | def setUp(self): 104 | ExtraRelatedCacheTests.setUp(self) 105 | self.kwargs = {'obj__obj':self.obj.obj.obj.id+1} 106 | self.func = self.manager.cache().filter(obj__obj=self.obj.obj.obj).exclude 107 | 108 | 109 | class RangeCacheTests(ExtraRelatedCacheTests): 110 | 111 | def setUp(self): 112 | ExtraRelatedCacheTests.setUp(self) 113 | self.kwargs = {'obj__obj__in':[self.firstmodel]} 114 | 115 | 116 | class NestedQuerysetCacheTests(ExtraRelatedCacheTests): 117 | 118 | def setUp(self): 119 | ExtraRelatedCacheTests.setUp(self) 120 | queryset = FirstModel.objects.all() 121 | self.kwargs = {'obj__obj__in':queryset} 122 | 123 | # disable these tests 124 | 125 | class CountCacheTests(BasicCacheTests): 126 | 127 | def setUp(self): 128 | settings.DEBUG = True 129 | BasicCacheTests.setUp(self) 130 | # call count to create any CacheBotSignals first 131 | self.func(**self.kwargs).count() 132 | 133 | def test_lookup(self, count=1): 134 | return 135 | connection.queries = [] 136 | self.assertEqual(self.func(**self.kwargs).count(), count) 137 | self.assertEqual(len(connection.queries), 1) 138 | self.assertEqual(self.func(**self.kwargs).count(), count) 139 | self.assertEqual(len(connection.queries), 1) 140 | 141 | 142 | def test_save_signal(self, obj=None): 143 | return 144 | if obj is None: 145 | obj = self.obj 146 | self.test_lookup(count=1) 147 | obj.save() 148 | self.test_lookup(count=1) 149 | 150 | def test_delete_signal(self, obj=None): 151 | return 152 | if obj is None: 153 | obj = self.obj 154 | self.test_lookup(count=1) 155 | obj.delete() 156 | self.test_lookup(count=0) 157 | 158 | class ExtraRelatedCountCacheTests(ExtraRelatedCacheTests): 159 | 160 | def setUp(self): 161 | settings.DEBUG = True 162 | ExtraRelatedCacheTests.setUp(self) 163 | # call count to create any CacheBotSignals first 164 | self.func(**self.kwargs).count() 165 | 166 | def test_related_save_signal(self): 167 | return 168 | self.test_save_signal(obj=self.obj.obj) 169 | 170 | def test_related_delete_signal(self): 171 | return 172 | self.test_delete_signal(obj=self.obj.obj) 173 | 174 | def 
test_extra_related_save_signal(self): 175 | return 176 | self.test_save_signal(obj=self.obj.obj.obj) 177 | 178 | def test_extra_related_delete_signal(self): 179 | return 180 | self.test_delete_signal(obj=self.obj.obj.obj) 181 | 182 | -------------------------------------------------------------------------------- /cachebot/tests/many_to_many_tests.py: -------------------------------------------------------------------------------- 1 | from cachebot.models import ManyModel 2 | from cachebot.tests.base_tests import BasicCacheTests, RelatedCacheTests 3 | 4 | class BasicManyToManyCacheTests(BasicCacheTests): 5 | 6 | def setUp(self): 7 | BasicCacheTests.setUp(self) 8 | self.manager = ManyModel.objects 9 | self.func = self.manager.cache().filter 10 | self.obj = self.manymodel 11 | self.related_obj = self.firstmodel 12 | self.kwargs = {'id':self.obj.id} 13 | 14 | def test_lookup(self): 15 | self._test_lookup() 16 | 17 | class RelatedManyToManyCacheTests(RelatedCacheTests): 18 | 19 | def setUp(self): 20 | RelatedCacheTests.setUp(self) 21 | self.manager = ManyModel.objects 22 | self.func = self.manager.cache().filter 23 | self.obj = self.manymodel 24 | self.related_obj = self.firstmodel 25 | self.kwargs = {'firstmodel':self.related_obj} 26 | 27 | def test_related_save_signal(self): 28 | # these will fail until we get many to many signals 29 | pass 30 | 31 | def test_related_delete_signal(self): 32 | self._test_lookup() 33 | obj = self.related_obj 34 | obj.text = "mind" 35 | obj.delete() 36 | self._test_cache_lookup(from_cache=False) 37 | 38 | -------------------------------------------------------------------------------- /cachebot/tests/no_cache_tests.py: -------------------------------------------------------------------------------- 1 | from cachebot import conf 2 | from cachebot.models import FirstModel, NoCacheModel 3 | from cachebot.tests.base_tests import BaseTestCase 4 | 5 | class BlacklistCacheTests(BaseTestCase): 6 | 7 | def tearDown(self): 8 | super(BaseTestCase, self).tearDown() 9 | conf.CACHEBOT_TABLE_BLACKLIST = self._CACHEBOT_TABLE_BLACKLIST 10 | 11 | def setUp(self): 12 | BaseTestCase.setUp(self) 13 | self.obj = FirstModel.objects.create(text="test") 14 | self.func = FirstModel.objects.get 15 | self._CACHEBOT_TABLE_BLACKLIST = conf.CACHEBOT_TABLE_BLACKLIST 16 | conf.CACHEBOT_TABLE_BLACKLIST += (FirstModel._meta.db_table,) 17 | 18 | def test_lookup_not_in_cache(self): 19 | obj = self.func(id=self.obj.id) 20 | self.assertFalse(obj.from_cache) 21 | obj = self.func(id=self.obj.id) 22 | self.assertFalse(obj.from_cache) 23 | 24 | class CacheGetFalseCacheTests(BlacklistCacheTests): 25 | 26 | def setUp(self): 27 | BlacklistCacheTests.setUp(self) 28 | self.obj = NoCacheModel.objects.create(text="test") 29 | self.func = NoCacheModel.objects.get 30 | -------------------------------------------------------------------------------- /cachebot/tests/reverse_lookup_tests.py: -------------------------------------------------------------------------------- 1 | from cachebot.models import FirstModel 2 | from cachebot.tests.base_tests import RelatedCacheTests, ExtraRelatedCacheTests 3 | 4 | class ReverseRelatedCacheTests(RelatedCacheTests): 5 | 6 | def setUp(self): 7 | RelatedCacheTests.setUp(self) 8 | self.manager = FirstModel.objects 9 | self.func = self.manager.cache().filter 10 | self.obj = self.secondmodel 11 | self.kwargs = {'secondmodel':self.obj} 12 | 13 | def test_related_new_obj(self): 14 | kwargs = {'secondmodel__text':self.secondmodel.text} 15 | self.test_new_obj(obj=self.secondmodel, 
kwargs=kwargs) 16 | 17 | 18 | class ReverseExtraRelatedCacheTests(ReverseRelatedCacheTests, ExtraRelatedCacheTests): 19 | 20 | def setUp(self): 21 | ExtraRelatedCacheTests.setUp(self) 22 | self.manager = FirstModel.objects 23 | self.func = self.manager.cache().filter 24 | self.obj = self.thirdmodel 25 | self.kwargs = {'secondmodel__thirdmodel':self.obj} 26 | 27 | def test_extra_related_new_obj(self): 28 | kwargs = {'secondmodel__thirdmodel__text':self.thirdmodel.text} 29 | self.test_new_obj(obj=self.thirdmodel, kwargs=kwargs) 30 | 31 | 32 | class ReverseRelatedValuesCacheTests(ReverseRelatedCacheTests, RelatedCacheTests): 33 | 34 | def setUp(self): 35 | RelatedCacheTests.setUp(self) 36 | self.manager = FirstModel.objects 37 | self.func = self.manager.cache().values().filter 38 | self.obj = self.secondmodel 39 | self.kwargs = {'secondmodel':self.obj} 40 | 41 | 42 | class ReverseExtraRelatedValuesCacheTests(ReverseExtraRelatedCacheTests, ExtraRelatedCacheTests): 43 | 44 | def setUp(self): 45 | ExtraRelatedCacheTests.setUp(self) 46 | self.manager = FirstModel.objects 47 | self.func = self.manager.cache().values().filter 48 | self.obj = self.thirdmodel 49 | self.kwargs = {'secondmodel__thirdmodel':self.obj} 50 | 51 | 52 | class ReverseExtraRelatedExcludeCacheTests(ReverseRelatedCacheTests, ExtraRelatedCacheTests): 53 | 54 | def setUp(self): 55 | ExtraRelatedCacheTests.setUp(self) 56 | self.manager = FirstModel.objects 57 | self.func = self.manager.cache().exclude(secondmodel__thirdmodel__id=500).filter 58 | self.obj = self.thirdmodel 59 | self.kwargs = {'secondmodel__thirdmodel':self.obj} 60 | 61 | def test_extra_related_new_obj(self): 62 | pass 63 | 64 | 65 | -------------------------------------------------------------------------------- /cachebot/tests/values_tests.py: -------------------------------------------------------------------------------- 1 | from cachebot.models import ThirdModel 2 | from cachebot.tests.base_tests import BasicCacheTests, RelatedCacheTests, ExtraRelatedCacheTests 3 | 4 | class ValuesBasicCacheTests1(BasicCacheTests): 5 | 6 | def setUp(self): 7 | BasicCacheTests.setUp(self) 8 | self.manager = ThirdModel.objects.cache().values() 9 | self.func = self.manager.filter 10 | 11 | 12 | class ValuesBasicCacheTests2(BasicCacheTests): 13 | 14 | def setUp(self): 15 | BasicCacheTests.setUp(self) 16 | self.manager = ThirdModel.objects.values().cache() 17 | self.func = self.manager.filter 18 | 19 | 20 | class ValuesBasicCacheTests3(BasicCacheTests): 21 | 22 | def setUp(self): 23 | BasicCacheTests.setUp(self) 24 | self.manager = ThirdModel.objects.cache().values('text') 25 | self.func = self.manager.filter 26 | 27 | 28 | class ValuesBasicCacheTests4(BasicCacheTests): 29 | 30 | def setUp(self): 31 | BasicCacheTests.setUp(self) 32 | self.manager = ThirdModel.objects.values('text').cache() 33 | self.func = self.manager.filter 34 | 35 | 36 | class ValuesBasicCacheTests5(BasicCacheTests): 37 | 38 | def setUp(self): 39 | BasicCacheTests.setUp(self) 40 | self.manager = ThirdModel.objects.values('text') 41 | self.func = self.manager.filter 42 | self.append_cache = True 43 | 44 | 45 | class ValuesRelatedCacheTests1(RelatedCacheTests): 46 | 47 | def setUp(self): 48 | RelatedCacheTests.setUp(self) 49 | self.manager = ThirdModel.objects.cache().values() 50 | self.func = self.manager.filter 51 | 52 | 53 | class ValuesRelatedCacheTests2(RelatedCacheTests): 54 | 55 | def setUp(self): 56 | RelatedCacheTests.setUp(self) 57 | self.manager = ThirdModel.objects.values().cache() 58 | self.func = 
self.manager.filter 59 | 60 | 61 | class ValuesRelatedCacheTests3(RelatedCacheTests): 62 | 63 | def setUp(self): 64 | RelatedCacheTests.setUp(self) 65 | self.manager = ThirdModel.objects.cache().values('text','obj__text') 66 | self.func = self.manager.filter 67 | 68 | 69 | class ValuesRelatedCacheTests4(RelatedCacheTests): 70 | 71 | def setUp(self): 72 | RelatedCacheTests.setUp(self) 73 | self.manager = ThirdModel.objects.values('text','obj__text').cache() 74 | self.func = self.manager.filter 75 | 76 | 77 | class ValuesRelatedCacheTests5(RelatedCacheTests): 78 | 79 | def setUp(self): 80 | RelatedCacheTests.setUp(self) 81 | self.manager = ThirdModel.objects.values('text','obj__text') 82 | self.func = self.manager.filter 83 | self.append_cache = True 84 | 85 | 86 | class ValuesExtraRelatedCacheTests1(ExtraRelatedCacheTests): 87 | 88 | def setUp(self): 89 | ExtraRelatedCacheTests.setUp(self) 90 | self.manager = ThirdModel.objects.cache().values() 91 | self.func = self.manager.filter 92 | 93 | 94 | class ValuesExtraRelatedCacheTests2(ExtraRelatedCacheTests): 95 | 96 | def setUp(self): 97 | ExtraRelatedCacheTests.setUp(self) 98 | self.manager = ThirdModel.objects.values().cache() 99 | self.func = self.manager.filter 100 | 101 | 102 | class ValuesExtraRelatedCacheTests3(ExtraRelatedCacheTests): 103 | 104 | def setUp(self): 105 | ExtraRelatedCacheTests.setUp(self) 106 | self.manager = ThirdModel.objects.cache().values('obj__text','obj__obj__text') 107 | self.func = self.manager.filter 108 | 109 | 110 | class ValuesExtraRelatedCacheTests4(ExtraRelatedCacheTests): 111 | 112 | def setUp(self): 113 | ExtraRelatedCacheTests.setUp(self) 114 | self.manager = ThirdModel.objects.values('obj__text','obj__obj__text').cache() 115 | self.func = self.manager.filter 116 | 117 | 118 | class ValuesExtraRelatedAppendCacheTests4(ExtraRelatedCacheTests): 119 | 120 | def setUp(self): 121 | ExtraRelatedCacheTests.setUp(self) 122 | self.manager = ThirdModel.objects.values('text','obj__text','obj__obj__text') 123 | self.func = self.manager.filter 124 | self.append_cache = True 125 | 126 | -------------------------------------------------------------------------------- /cachebot/utils.py: -------------------------------------------------------------------------------- 1 | from time import time 2 | 3 | from django.core.cache import cache 4 | from django.utils.hashcompat import md5_constructor 5 | from django.db.models.sql.constants import LOOKUP_SEP 6 | from django.db.models.base import ModelBase 7 | from django.db.models.query_utils import QueryWrapper 8 | from django.core.exceptions import ObjectDoesNotExist 9 | 10 | def set_value(obj, key, value): 11 | """Helper method to handle setting values in a CachedQuerySet or ValuesQuerySet object""" 12 | try: 13 | obj[key] = value 14 | except TypeError: 15 | setattr(obj, key, value) 16 | 17 | def get_invalidation_key(table_alias, accessor_path='', lookup_type='exact', negate=False, value='', version=None): 18 | """ 19 | An invalidation key is associated with a set of cached queries. 
A blank accessor_path 20 | will create an invalidation key for this entire table instead of a specific row 21 | """ 22 | 23 | # punt on this problem for now 24 | if isinstance(value, QueryWrapper) or lookup_type != 'exact' or negate: 25 | value = '' 26 | 27 | if hasattr(value, '__iter__'): 28 | if len(value) == 1: 29 | value = value[0] 30 | else: 31 | value = '' 32 | 33 | base_key = md5_constructor('.'.join((accessor_path, unicode(value))).encode('utf-8')).hexdigest() 34 | return cache.make_key('.'.join((table_alias, 'cachebot.invalidation', base_key)), version=version) 35 | 36 | def get_values(instance, accessor_path): 37 | accessor_split = accessor_path.split(LOOKUP_SEP) 38 | if isinstance(instance, dict): 39 | try: 40 | yield instance[accessor_path] 41 | raise StopIteration 42 | except KeyError: 43 | # maybe this is a nested reverse relation 44 | accessor = accessor_split.pop(0) 45 | try: 46 | instance = instance[accessor] 47 | except KeyError: 48 | instance = instance[accessor + '_cache'] 49 | 50 | for value in _get_nested_value(instance, accessor_split): 51 | if value is None: 52 | continue 53 | if isinstance(value.__class__, ModelBase): 54 | value = getattr(value, 'pk') 55 | yield value 56 | raise StopIteration 57 | 58 | def _get_nested_value(instance, accessor_split): 59 | accessor = accessor_split.pop(0) 60 | try: 61 | value = getattr(instance, accessor) 62 | except AttributeError: 63 | if not instance: 64 | yield None 65 | raise StopIteration 66 | 67 | raise_error = True 68 | for modifier in ('_cache', '_id'): 69 | if accessor.endswith(modifier): 70 | accessor = accessor[:-len(modifier)] 71 | try: 72 | value = getattr(instance, accessor) 73 | raise_error = False 74 | break 75 | except AttributeError: 76 | pass 77 | 78 | if raise_error: 79 | yield None 80 | raise StopIteration 81 | 82 | if hasattr(value, 'select_reverse'): 83 | # check if a cached version of this reverse relation exists 84 | if hasattr(value, accessor + '_cache'): 85 | value = getattr(instance, accessor + '_cache') 86 | else: 87 | value = value.all() 88 | 89 | if hasattr(value, '__iter__'): 90 | if accessor_split: 91 | for obj in value: 92 | for nested_val in _get_nested_value(obj, accessor_split): 93 | yield nested_val 94 | else: 95 | for nested_val in value: 96 | yield nested_val 97 | else: 98 | if accessor_split: 99 | for nested_val in _get_nested_value(value, accessor_split): 100 | yield nested_val 101 | else: 102 | yield value 103 | raise StopIteration 104 | 105 | def get_many_by_key(cache_key_f, item_keys, version=None): 106 | """ 107 | For a series of item keys and a function that maps these keys to cache keys, 108 | get all the items from the cache if they are available there. 109 | 110 | Return a dictionary mapping the item keys to the objects retrieved from the 111 | cache. Any items not found in the cache are not returned. 
112 | """ 113 | cache_key_to_item_key = {} 114 | for item_key in item_keys: 115 | cache_key = cache.make_key(cache_key_f(item_key), version=version) 116 | cache_key_to_item_key[cache_key] = item_key 117 | 118 | # request from cache 119 | from_cache = cache.get_many(cache_key_to_item_key.keys()) 120 | 121 | results = {} 122 | for cache_key, value in from_cache.iteritems(): 123 | item_key = cache_key_to_item_key[cache_key] 124 | results[item_key] = value 125 | return results 126 | 127 | def fetch_objects(cache_key_f, get_database_f, item_keys): 128 | """ 129 | For a series of item keys and two functions, get these items from the cache 130 | or from the database (individually so that the queries are cached). 131 | 132 | cache_key_f: function to convert an item_key to a cache key 133 | get_database_f: function to get an item from the database 134 | 135 | Returns a dictionary mapping item_keys to objects. Objects that do not 136 | exist in the database are omitted. 137 | """ 138 | item_key_to_item = get_many_by_key(cache_key_f, item_keys) 139 | 140 | for item_key in item_keys: 141 | if item_key not in item_key_to_item: 142 | # failed to get the item from the cache 143 | try: 144 | # have to get each item individually to cache the query 145 | item = get_database_f(item_key) 146 | item_key_to_item[item_key] = item 147 | except ObjectDoesNotExist: 148 | pass 149 | 150 | return item_key_to_item 151 | 152 | def fetch_instances(model, field, values): 153 | """ 154 | For a series of item keys, attempt to get each model instance from the cache; 155 | if an instance is not cached, query the database for it. 156 | 157 | The point is to issue a single memcache query and then individual database queries 158 | for the remaining items. A single database query for the remaining 159 | items would be preferable, but cachebot does not appear to support this. 
160 | """ 161 | cache_key_f = lambda value: model.objects.filter((field, value)).get_cache_key() 162 | # since the filter query returns a list, it seems we need a list here to keep the types the same 163 | get_database_f = lambda value: [model.objects.get((field, value))] 164 | 165 | item_key_to_object = fetch_objects(cache_key_f, get_database_f, values) 166 | 167 | # remove the list surrounding each value by grabbing the first entry 168 | for k, v in item_key_to_object.items(): 169 | if len(v) > 0: 170 | item_key_to_object[k] = v[0] 171 | else: 172 | del item_key_to_object[k] # this happens when cachebot has cached a result of [] for the query 173 | 174 | return item_key_to_object 175 | 176 | def flush_cache(hard=True): 177 | from cachebot.models import CacheBotSignals 178 | from cachebot.signals import cache_signals 179 | 180 | CacheBotSignals.objects.all().delete() 181 | cache_signals.local_signals = {} 182 | if hard: 183 | cache.clear() 184 | else: 185 | cache.version = int(time()*10000) 186 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | import cachebot 3 | import os 4 | 5 | def read(fname): 6 | return open(os.path.join(os.path.dirname(__file__), fname)).read() 7 | 8 | README = read('README.rst') 9 | 10 | 11 | setup( 12 | name = "django-cachebot", 13 | version = cachebot.__version__, 14 | description = 'Automated caching and invalidation for the Django ORM', 15 | long_description = README, 16 | url = 'http://github.com/dziegler/django-cachebot', 17 | download_url = 'http://github.com/dziegler/django-cachebot/archives/master', 18 | author = 'David Ziegler', 19 | author_email = 'david.ziegler@gmail.com', 20 | license = 'BSD', 21 | zip_safe = False, 22 | packages = find_packages(), 23 | include_package_data = True, 24 | install_requires = [ 25 | 'django>=1.3', 26 | ], 27 | classifiers = [ 28 | 'Environment :: Web Environment', 29 | 'Framework :: Django', 30 | 'Intended Audience :: Developers', 31 | 'License :: OSI Approved :: BSD License', 32 | 'Operating System :: OS Independent', 33 | 'Programming Language :: Python', 34 | 'Topic :: Internet :: WWW/HTTP', 35 | ] 36 | ) 37 | --------------------------------------------------------------------------------
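Usage sketch (editor's illustration, not part of the repository): the module-level helpers near the end of ``cachebot/utils.py`` above are intended to be called from application code. Below is a minimal sketch of ``fetch_instances`` and ``flush_cache``, assuming a configured Django project with cachebot installed; ``FirstModel`` is one of cachebot's own test models and stands in for any cachebot-managed model, and the ``ids`` list is a hypothetical placeholder::

    from cachebot.models import FirstModel
    from cachebot.utils import fetch_instances, flush_cache

    ids = [1, 2, 3]

    # One cache.get_many() round trip for all ids (via get_many_by_key), then
    # one ORM query per cache miss; each miss query is itself cached by cachebot.
    instances_by_id = fetch_instances(FirstModel, 'id', ids)
    first = instances_by_id.get(1)  # None if no FirstModel with id 1 exists

    # Soft flush: invalidate everything cachebot has cached by bumping the cache
    # version; flush_cache(hard=True) would call cache.clear() instead.
    flush_cache(hard=False)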