├── tests
│   ├── __init__.py
│   ├── testapp
│   │   ├── __init__.py
│   │   ├── tests
│   │   │   ├── __init__.py
│   │   │   ├── socket_timeout_tests.py
│   │   │   ├── multi_server_tests.py
│   │   │   ├── master_slave_tests.py
│   │   │   ├── compressor_tests.py
│   │   │   ├── socket_tests.py
│   │   │   ├── tcp_tests.py
│   │   │   ├── serializers_tests.py
│   │   │   └── base_tests.py
│   │   └── models.py
│   ├── urls.py
│   ├── views.py
│   └── settings.py
├── redis_cache
│   ├── backends
│   │   ├── __init__.py
│   │   ├── dummy.py
│   │   ├── single.py
│   │   ├── multiple.py
│   │   └── base.py
│   ├── constants.py
│   ├── __init__.py
│   ├── cache.py
│   ├── compressors.py
│   ├── sharder.py
│   ├── serializers.py
│   ├── connection.py
│   └── utils.py
├── requirements.txt
├── MANIFEST.in
├── requirements-dev.txt
├── install_redis.sh
├── .gitignore
├── .travis.yml
├── docs
│   ├── index.rst
│   ├── intro_quick_start.rst
│   ├── api.rst
│   ├── Makefile
│   ├── advanced_configuration.rst
│   └── conf.py
├── Makefile
├── setup.py
├── LICENSE
├── AUTHORS.rst
└── README.rst

/tests/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/tests/testapp/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/redis_cache/backends/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | redis<4.0
2 | 
--------------------------------------------------------------------------------
/tests/testapp/tests/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE AUTHORS.rst README.rst
--------------------------------------------------------------------------------
/redis_cache/constants.py:
--------------------------------------------------------------------------------
1 | KEY_EXPIRED = -2
2 | KEY_NON_VOLATILE = -1
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | hiredis==0.2.0
2 | django-nose==1.4.4
3 | nose==1.3.6
4 | msgpack-python==0.4.6
5 | pyyaml==5.3.1
--------------------------------------------------------------------------------
/tests/urls.py:
--------------------------------------------------------------------------------
1 | from django.conf.urls import url
2 | 
3 | from tests.views import someview
4 | 
5 | urlpatterns = [
6 |     url(r'^$', someview),
7 | ]
--------------------------------------------------------------------------------
/install_redis.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | : ${REDIS_VERSION:="4.0.11"}
4 | 
5 | test -d redis || git clone https://github.com/antirez/redis
6 | git -C redis checkout $REDIS_VERSION
7 | make -C redis
--------------------------------------------------------------------------------
/redis_cache/__init__.py:
--------------------------------------------------------------------------------
1 | from redis_cache.backends.single import RedisCache
2 | from redis_cache.backends.multiple import ShardedRedisCache
3 | from
redis_cache.backends.dummy import RedisDummyCache 4 | -------------------------------------------------------------------------------- /tests/views.py: -------------------------------------------------------------------------------- 1 | from django.core.cache import caches 2 | from django.http import HttpResponse 3 | 4 | 5 | def someview(request): 6 | cache = caches['default'] 7 | cache.set("foo", "bar") 8 | return HttpResponse("Pants") 9 | -------------------------------------------------------------------------------- /redis_cache/cache.py: -------------------------------------------------------------------------------- 1 | # for backwards compat 2 | from redis_cache import RedisCache 3 | from redis_cache import ShardedRedisCache 4 | from redis_cache.backends.base import ImproperlyConfigured 5 | from redis_cache.connection import pool 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[c|o] 2 | .DS_Store 3 | *.sql 4 | *.bz2 5 | *~ 6 | *.log 7 | *.json 8 | *.wsgi 9 | local_settings.py 10 | development_settings.py 11 | *.egg-info 12 | .project 13 | .pydevproject 14 | .settings 15 | dist/* 16 | *.rdb 17 | dist/* 18 | MANIFEST 19 | .venv 20 | redis/ 21 | */_build/ 22 | build/* 23 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | dist: xenial 2 | language: python 3 | python: 4 | - 3.6 5 | - 3.7 6 | - 3.8 7 | env: 8 | - DJANGO_VERSION='>=3.0,<3.1' 9 | - DJANGO_VERSION='>=3.1,<3.2' 10 | - DJANGO_VERSION='>=3.2,<4.0' 11 | # command to run tests 12 | install: ./install_redis.sh 13 | script: make test DJANGO_VERSION=$DJANGO_VERSION 14 | branches: 15 | only: 16 | - unstable 17 | - master 18 | -------------------------------------------------------------------------------- /tests/testapp/models.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from django.db import models 3 | 4 | def expensive_calculation(): 5 | expensive_calculation.num_runs += 1 6 | return datetime.now() 7 | 8 | class Poll(models.Model): 9 | question = models.CharField(max_length=200) 10 | answer = models.CharField(max_length=200) 11 | pub_date = models.DateTimeField('date published', default=expensive_calculation) 12 | -------------------------------------------------------------------------------- /redis_cache/backends/dummy.py: -------------------------------------------------------------------------------- 1 | from django.core.cache.backends.dummy import DummyCache 2 | 3 | 4 | class RedisDummyCache(DummyCache): 5 | def ttl(self, key): 6 | return 0 7 | 8 | def delete_pattern(self, pattern, version=None): 9 | return None 10 | 11 | def get_or_set(self, key, default, timeout=None): 12 | return default() if callable(default) else default 13 | 14 | def reinsert_keys(self): 15 | return None 16 | 17 | def persist(self, key): 18 | return True 19 | 20 | def expire(self, key, timeout): 21 | return True 22 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. django-redis-cache documentation master file, created by 2 | sphinx-quickstart on Mon Jul 20 10:08:24 2015. 
3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to django-redis-cache's documentation! 7 | ============================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | intro_quick_start 15 | api 16 | advanced_configuration 17 | 18 | 19 | 20 | Indices and tables 21 | ================== 22 | 23 | * :ref:`genindex` 24 | * :ref:`modindex` 25 | * :ref:`search` 26 | 27 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SHELL := /bin/bash 2 | 3 | PACKAGE_NAME=redis_cache 4 | DJANGO_VERSION?=>=1.11,<4.0 5 | 6 | .PHONY: install_requirements 7 | install_requirements: requirements*.txt 8 | pip install -r requirements.txt 9 | pip install -r requirements-dev.txt 10 | pip install 'Django$(DJANGO_VERSION)' 11 | 12 | .PHONY: clean 13 | clean: 14 | python setup.py clean 15 | rm -rf build/ 16 | rm -rf dist/ 17 | rm -rf *.egg*/ 18 | rm -rf __pycache__/ 19 | rm -f MANIFEST 20 | rm -f test.db 21 | 22 | .PHONY: test 23 | test: install_requirements 24 | PYTHONPATH=$(PYTHONPATH): django-admin test --settings=tests.settings -s 25 | 26 | .PHONY: shell 27 | shell: 28 | PYTHONPATH=$(PYTHONPATH): django-admin shell --settings=tests.settings 29 | -------------------------------------------------------------------------------- /tests/settings.py: -------------------------------------------------------------------------------- 1 | DEBUG = True 2 | 3 | DATABASES = { 4 | 'default': { 5 | 'ENGINE': 'django.db.backends.sqlite3', 6 | } 7 | } 8 | 9 | INSTALLED_APPS = [ 10 | 'django_nose', 11 | 'tests.testapp', 12 | ] 13 | 14 | ROOT_URLCONF = 'tests.urls' 15 | 16 | SECRET_KEY = "shh...it's a seakret" 17 | 18 | CACHES = { 19 | 'default': { 20 | 'BACKEND': 'redis_cache.RedisCache', 21 | 'LOCATION': '127.0.0.1:6381', 22 | 'OPTIONS': { 23 | 'DB': 15, 24 | 'PASSWORD': 'yadayada', 25 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 26 | 'PICKLE_VERSION': 2, 27 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 28 | 'CONNECTION_POOL_CLASS_KWARGS': { 29 | 'max_connections': 2, 30 | } 31 | }, 32 | }, 33 | } 34 | TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' 35 | MIDDLEWARE_CLASSES = tuple() 36 | -------------------------------------------------------------------------------- /tests/testapp/tests/socket_timeout_tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from django.test import TestCase, override_settings 3 | 4 | from redis.exceptions import ConnectionError 5 | from tests.testapp.tests.base_tests import SetupMixin 6 | 7 | LOCATION = "127.0.0.1:6381" 8 | 9 | 10 | @override_settings( 11 | CACHES={ 12 | 'default': { 13 | 'BACKEND': 'redis_cache.RedisCache', 14 | 'LOCATION': LOCATION, 15 | 'OPTIONS': { 16 | 'DB': 15, 17 | 'PASSWORD': 'yadayada', 18 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 19 | 'PICKLE_VERSION': -1, 20 | 'SOCKET_TIMEOUT': 0, 21 | }, 22 | }, 23 | } 24 | ) 25 | class SocketTimeoutTestCase(SetupMixin, TestCase): 26 | 27 | def tearDown(self): 28 | pass 29 | 30 | def test_socket_timeout(self): 31 | self.reset_pool() 32 | cache = self.get_cache() 33 | with self.assertRaises(ConnectionError): 34 | cache.set('aaaaa', 'a') 35 | -------------------------------------------------------------------------------- /setup.py: 
-------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup( 4 | name="django-redis-cache", 5 | url="http://github.com/sebleier/django-redis-cache/", 6 | author="Sean Bleier", 7 | author_email="sebleier@gmail.com", 8 | version="3.0.1", 9 | license="BSD", 10 | packages=["redis_cache", "redis_cache.backends"], 11 | description="Redis Cache Backend for Django", 12 | install_requires=['redis<4.0'], 13 | classifiers=[ 14 | "Programming Language :: Python", 15 | "Programming Language :: Python :: 3.6", 16 | "Programming Language :: Python :: 3.7", 17 | "Programming Language :: Python :: 3.8", 18 | "License :: OSI Approved :: BSD License", 19 | "Operating System :: OS Independent", 20 | "Topic :: Software Development :: Libraries", 21 | "Topic :: Utilities", 22 | "Environment :: Web Environment", 23 | "Framework :: Django", 24 | "Framework :: Django :: 3.0", 25 | "Framework :: Django :: 3.1", 26 | "Framework :: Django :: 3.2", 27 | ], 28 | ) 29 | -------------------------------------------------------------------------------- /redis_cache/compressors.py: -------------------------------------------------------------------------------- 1 | import zlib 2 | 3 | try: 4 | import bz2 5 | except ImportError: 6 | pass 7 | 8 | 9 | class BaseCompressor(object): 10 | 11 | def __init__(self, **kwargs): 12 | super(BaseCompressor, self).__init__() 13 | 14 | def compress(self, value): 15 | raise NotImplementedError 16 | 17 | def decompress(self, value): 18 | raise NotImplementedError 19 | 20 | 21 | class NoopCompressor(BaseCompressor): 22 | 23 | def compress(self, value): 24 | return value 25 | 26 | def decompress(self, value): 27 | return value 28 | 29 | 30 | class ZLibCompressor(BaseCompressor): 31 | 32 | def __init__(self, level=6): 33 | self.level = level 34 | super(ZLibCompressor, self).__init__() 35 | 36 | def compress(self, value): 37 | return zlib.compress(value, self.level) 38 | 39 | def decompress(self, value): 40 | return zlib.decompress(value) 41 | 42 | 43 | class BZip2Compressor(BaseCompressor): 44 | 45 | def __init__(self, compresslevel=9): 46 | self.compresslevel = compresslevel 47 | super(BZip2Compressor, self).__init__() 48 | 49 | def compress(self, value): 50 | return bz2.compress(value, compresslevel=self.compresslevel) 51 | 52 | def decompress(self, value): 53 | return bz2.decompress(value) 54 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 Sean Bleier 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions 6 | are met: 7 | 1. Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 2. Redistributions in binary form must reproduce the above copyright 10 | notice, this list of conditions and the following disclaimer in the 11 | documentation and/or other materials provided with the distribution. 12 | 3. The name of the author may not be used to endorse or promote products 13 | derived from this software without specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 16 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 17 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
18 | IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 19 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 20 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 21 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 22 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 24 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /redis_cache/sharder.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | from bisect import insort, bisect 3 | 4 | 5 | DIGITS = 8 6 | 7 | 8 | def get_slot(key): 9 | digest = hashlib.md5(key.encode('utf-8')).hexdigest() 10 | return int(digest[-DIGITS:], 16) 11 | 12 | 13 | class Node(object): 14 | 15 | def __init__(self, node, i): 16 | self._node = node 17 | self._i = i 18 | key = f"{i}:{self._node}" 19 | self._position = get_slot(key) 20 | 21 | def __gt__(self, other): 22 | if isinstance(other, int): 23 | return self._position > other 24 | elif isinstance(other, Node): 25 | return self._position > other._position 26 | raise TypeError( 27 | 'Cannot compare this class with "%s" type' % type(other) 28 | ) 29 | 30 | 31 | class HashRing(object): 32 | 33 | def __init__(self, replicas=16): 34 | self.replicas = replicas 35 | self._nodes = [] 36 | 37 | def _add(self, node, i): 38 | insort(self._nodes, Node(node, i)) 39 | 40 | def add(self, node, weight=1): 41 | for i in range(weight * self.replicas): 42 | self._add(node, i) 43 | 44 | def remove(self, node): 45 | n = len(self._nodes) 46 | for i, _node in enumerate(reversed(self._nodes)): 47 | if node == _node._node: 48 | del self._nodes[n - i - 1] 49 | 50 | def get_node(self, key): 51 | i = bisect(self._nodes, get_slot(key)) - 1 52 | return self._nodes[i]._node 53 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | | Sean Bleier 2 | | Matt Dennewitz 3 | | Jannis Leidel 4 | | S. 
Angel / Twidi
5 | | Noah Kantrowitz / coderanger
6 | | Martin Mahner / bartTC
7 | | Timothée Peignier / cyberdelia
8 | | Lior Sion / liorsion
9 | | Ales Zoulek / aleszoulek
10 | | James Aylett / jaylett
11 | | Todd Boland / boland
12 | | Issac Kelly / issackelly
13 | | Saverio / mucca
14 | | Matt Robenolt / mattrobenolt
15 | | Carl Meyer / carljm
16 | | wtracyliu / wtracyliu
17 | | Florent Messa / thoas
18 | | Markus Kaiserswerth / mkai
19 | | Michael Lindemuth / mlindemu
20 | | John Furr / gnulnx
21 | | Christian Pedersen / chripede
22 | | Martey Dodoo / martey
23 | | Joona Pääkkönen / paksu
24 | | Tim Graham / timgraham
25 | | Justin Arulnathan / dinie
26 | | Mariusz Felisiak / felixxm
27 | | metamatik
28 | | Florian Zimmermann / PoByBolek
29 | 
--------------------------------------------------------------------------------
/redis_cache/serializers.py:
--------------------------------------------------------------------------------
1 | try:
2 |     import cPickle as pickle
3 | except ImportError:
4 |     import pickle
5 | 
6 | import json
7 | 
8 | try:
9 |     import msgpack
10 | except ImportError:
11 |     pass
12 | 
13 | try:
14 |     import yaml
15 | except ImportError:
16 |     pass
17 | 
18 | from django.utils.encoding import force_bytes, force_str
19 | 
20 | 
21 | class BaseSerializer(object):
22 | 
23 |     def __init__(self, **kwargs):
24 |         super(BaseSerializer, self).__init__()
25 | 
26 |     def serialize(self, value):
27 |         raise NotImplementedError
28 | 
29 |     def deserialize(self, value):
30 |         raise NotImplementedError
31 | 
32 | 
33 | class PickleSerializer(BaseSerializer):
34 | 
35 |     def __init__(self, pickle_version=-1):
36 |         self.pickle_version = pickle_version
37 | 
38 |     def serialize(self, value):
39 |         return pickle.dumps(value, self.pickle_version)
40 | 
41 |     def deserialize(self, value):
42 |         return pickle.loads(force_bytes(value))
43 | 
44 | 
45 | class JSONSerializer(BaseSerializer):
46 | 
47 |     def __init__(self, **kwargs):
48 |         super(JSONSerializer, self).__init__(**kwargs)
49 | 
50 |     def serialize(self, value):
51 |         return force_bytes(json.dumps(value))
52 | 
53 |     def deserialize(self, value):
54 |         return json.loads(force_str(value))
55 | 
56 | 
57 | class MSGPackSerializer(BaseSerializer):
58 | 
59 |     def serialize(self, value):
60 |         return msgpack.dumps(value)
61 | 
62 |     def deserialize(self, value):
63 |         return msgpack.loads(value, encoding='utf-8')
64 | 
65 | 
66 | class YAMLSerializer(BaseSerializer):
67 | 
68 |     def serialize(self, value):
69 |         return yaml.dump(value, encoding='utf-8', Dumper=yaml.Dumper)
70 | 
71 |     def deserialize(self, value):
72 |         return yaml.load(value, Loader=yaml.FullLoader)
73 | 
74 | 
75 | class DummySerializer(BaseSerializer):
76 | 
77 |     def __init__(self, **kwargs):
78 |         super(DummySerializer, self).__init__(**kwargs)
79 | 
80 |     def serialize(self, value):
81 |         return value
82 | 
83 |     def deserialize(self, value):
84 |         return value
85 | 
--------------------------------------------------------------------------------
/docs/intro_quick_start.rst:
--------------------------------------------------------------------------------
1 | Intro and Quick Start
2 | *********************
3 | 
4 | Intro
5 | =====
6 | 
7 | `django-redis-cache`_ is a cache backend for the `Django`_ web framework. It
8 | uses the `redis`_ server, which is an in-memory key-value data structure server.
9 | Similar in performance to the great `Memcached`_, it has several features that
10 | make it more appealing.
11 | 
12 | * Multiple data structure types, e.g. strings, lists, sets, sorted sets, and hashes.
13 | 
14 | * Atomic pipelines: multiple commands are guaranteed to run sequentially and without interruption.
15 | 
16 | * Pub/Sub: subscribe to a channel and listen for messages from other processes.
17 | 
18 | * Can persist data to disk, which can keep a cache warm even if the process is killed.
19 | 
20 | * Lua scripting
21 | 
22 | * Clustering (as of 3.0)
23 | 
24 | * Many more.
25 | 
26 | Many of these features are irrelevant to caching, but they can be used by other
27 | areas of a web stack, and therefore Redis offers a compelling case for simplifying
28 | your infrastructure.
29 | 
30 | 
31 | 
32 | Quick Start
33 | ===========
34 | 
35 | 
36 | **Recommended:**
37 | 
38 | * `redis`_ >= 2.8
39 | 
40 | * `redis-py`_ >= 3.0.0
41 | 
42 | * `python`_ >= 3.6
43 | 
44 | 
45 | 1. Install `redis`_. You can use ``install_redis.sh`` to install a local copy
46 |    of redis. Start the server by running ``./redis/src/redis-server``.
47 | 
48 | 2. Run ``pip install django-redis-cache``.
49 | 
50 | 3. Modify your Django settings to use ``redis_cache``.
51 | 
52 | .. code:: python
53 | 
54 |     CACHES = {
55 |         'default': {
56 |             'BACKEND': 'redis_cache.RedisCache',
57 |             'LOCATION': 'localhost:6379',
58 |         },
59 |     }
60 | 
61 | **Warning: By default, django-redis-cache sets keys in database 1 of Redis, whereas a fresh ``redis-cli`` session starts on database 0. Switch to database 1 with** ``SELECT 1``.
62 | 
63 | .. _Django: https://www.djangoproject.com/
64 | .. _django-redis-cache: http://github.com/sebleier/django-redis-cache
65 | .. _redis-py: http://github.com/andymccurdy/redis-py/
66 | .. _redis: http://github.com/antirez/redis/
67 | .. _python: http://python.org
68 | .. _Memcached: http://memcached.org
69 | 
--------------------------------------------------------------------------------
/redis_cache/connection.py:
--------------------------------------------------------------------------------
1 | from redis.connection import UnixDomainSocketConnection, Connection, SSLConnection
2 | 
3 | 
4 | class CacheConnectionPool(object):
5 | 
6 |     def __init__(self):
7 |         self._clients = {}
8 |         self._connection_pools = {}
9 | 
10 |     def __contains__(self, server):
11 |         return server in self._clients
12 | 
13 |     def __getitem__(self, server):
14 |         return self._clients.get(server, None)
15 | 
16 |     def reset(self):
17 |         for pool in self._connection_pools.values():
18 |             pool.disconnect()
19 |         self._clients = {}
20 |         self._connection_pools = {}
21 | 
22 |     def get_connection_pool(
23 |         self,
24 |         client,
25 |         host='127.0.0.1',
26 |         port=6379,
27 |         ssl=False,
28 |         db=1,
29 |         password=None,
30 |         parser_class=None,
31 |         unix_socket_path=None,
32 |         connection_pool_class=None,
33 |         connection_pool_class_kwargs=None,
34 |         socket_timeout=None,
35 |         socket_connect_timeout=None,
36 |         **kwargs
37 |     ):
38 |         connection_identifier = (host, port, db, unix_socket_path)
39 | 
40 |         self._clients[connection_identifier] = client
41 | 
42 |         pool = self._connection_pools.get(connection_identifier)
43 | 
44 |         if pool is None:
45 |             connection_class = (
46 |                 unix_socket_path and UnixDomainSocketConnection
47 |                 or ssl and SSLConnection
48 |                 or Connection
49 |             )
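            # The and/or chain above selects the first truthy option: a unix
            # socket path takes precedence over SSL, which takes precedence
            # over a plain TCP connection.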
50 | 
51 |             kwargs = {
52 |                 'db': db,
53 |                 'password': password,
54 |                 'connection_class': connection_class,
55 |                 'parser_class': parser_class,
56 |                 'socket_timeout': socket_timeout,
57 |             }
58 | 
59 |             if not issubclass(connection_class, UnixDomainSocketConnection):
60 |                 kwargs['socket_connect_timeout'] = socket_connect_timeout
61 | 
62 |             kwargs.update(connection_pool_class_kwargs or {})
63 | 
64 |             if unix_socket_path in (None, ''):
65 |                 kwargs.update({
66 |                     'host': host,
67 |                     'port': port,
68 |                 })
69 |             else:
70 |                 kwargs['path'] = unix_socket_path
71 | 
72 |             pool = connection_pool_class(**kwargs)
73 | 
74 |             self._connection_pools[connection_identifier] = pool
75 |             pool.connection_identifier = connection_identifier
76 | 
77 |         return pool
78 | 
79 | pool = CacheConnectionPool()
--------------------------------------------------------------------------------
/tests/testapp/tests/multi_server_tests.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from math import sqrt
3 | from redis_cache.sharder import HashRing
4 | 
5 | 
6 | def mean(lst):
7 |     return sum(lst) / len(lst)
8 | 
9 | 
10 | def stddev(lst):
11 |     """Returns the standard deviation of lst."""
12 |     avg = mean(lst)
13 |     variance = sum((i - avg) ** 2 for i in lst) / len(lst)
14 |     return sqrt(variance)
15 | 
16 | 
17 | class MultiServerTests(object):
18 | 
19 |     def test_distribution(self):
20 |         nodes = [node._position for node in self.cache.sharder._nodes]
21 |         nodes.sort()
22 |         diffs = [(b - a) for a, b in zip(nodes[:-1], nodes[1:])]
23 |         ring_size = 16 ** 8
24 |         # Close the ring: include the gap that wraps past the top slot.
25 |         diffs.append(ring_size - nodes[-1] + nodes[0])
26 |         perfect_dist = ring_size / len(nodes)
27 |         random_dist = sum(diffs) / len(diffs)
28 |         _max = max([perfect_dist, random_dist])
29 |         _min = min([perfect_dist, random_dist])
30 |         percentage = (1 - _min / _max) * 100
31 | 
32 |         # Assert they are within 2 percent of each other
33 |         self.assertLess(percentage, 2.0)
34 | 
35 |     def test_make_key_distribution(self):
36 |         ring = HashRing()
37 |         nodes = [
38 |             ('127.0.0.1', 6379, 15, '/tmp/redis0.sock'),
39 |             ('127.0.0.1', 6379, 15, '/tmp/redis1.sock'),
40 |             ('127.0.0.1', 6379, 15, '/tmp/redis2.sock'),
41 |         ]
42 |         for node in nodes:
43 |             ring.add(str(node))
44 | 
45 |         n = 50000
46 |         counter = Counter(
47 |             [ring.get_node(str(i)) for i in range(n)]
48 |         )
49 |         self.assertLess(
50 |             ((stddev(counter.values()) / n) * 100.0), 10, counter.values()
51 |         )
52 | 
53 |     def test_key_distribution(self):
54 |         n = 10000
55 |         for i in range(n):
56 |             self.cache.set(i, i)
57 |         keys = [
58 |             len(client.keys('*'))
59 |             for client in self.cache.clients.values()
60 |         ]
61 |         self.assertEqual(sum(keys), n)
62 |         self.assertLess(((stddev(keys) / n) * 100.0), 10)
63 | 
64 |     def test_removing_nodes(self):
65 |         c1, c2, c3 = self.cache.clients.keys()
66 |         replicas = self.cache.sharder.replicas
67 | 
68 |         self.assertEqual(len(self.cache.sharder._nodes), 3 * replicas)
69 | 
70 |         self.cache.sharder.remove(c1)
71 |         self.assertEqual(len(self.cache.sharder._nodes), 2 * replicas)
72 | 
73 |         self.cache.sharder.remove(c2)
74 |         self.assertEqual(len(self.cache.sharder._nodes), 1 * replicas)
75 | 
76 |         self.cache.sharder.remove(c3)
77 |         self.assertEqual(len(self.cache.sharder._nodes), 0)
--------------------------------------------------------------------------------
/tests/testapp/tests/master_slave_tests.py:
--------------------------------------------------------------------------------
1 | import time
2 | 
3 | import django
4 | from django.core.cache import caches
5 | from django.test import TestCase, override_settings
6 | 
7 | from redis_cache.connection import pool
8 | 
9 | from tests.testapp.tests.base_tests import SetupMixin
10 | 
11 | 
12 | MASTER_LOCATION = "127.0.0.1:6387"
13 | LOCATIONS = [
14 |     '127.0.0.1:6387',
15 |     '127.0.0.1:6388',
16 |     '127.0.0.1:6389',
17 | ]
18 | 
19 | 
20 | @override_settings(CACHES={
21 |     'default': {
22 |         'BACKEND': 'redis_cache.RedisCache',
23 |         'LOCATION': LOCATIONS,
24 |         'OPTIONS': {
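            # MASTER_CACHE (below) names the node that receives all writes;
            # per RedisCache.get_client in backends/single.py, reads are
            # served by a randomly chosen client.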
25 | 'DB': 1, 26 | 'PASSWORD': 'yadayada', 27 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 28 | 'PICKLE_VERSION': -1, 29 | 'MASTER_CACHE': MASTER_LOCATION, 30 | }, 31 | }, 32 | }) 33 | class MasterSlaveTestCase(SetupMixin, TestCase): 34 | 35 | def setUp(self): 36 | super(MasterSlaveTestCase, self).setUp() 37 | pool.reset() 38 | 39 | def test_master_client(self): 40 | # Reset the cache at the beginning of the test. 41 | if django.VERSION < (3, 2): 42 | del caches._caches.caches['default'] 43 | else: 44 | del caches['default'] 45 | cache = self.get_cache() 46 | client = cache.master_client 47 | self.assertEqual( 48 | client.connection_pool.connection_identifier, 49 | ('127.0.0.1', 6387, 1, None) 50 | ) 51 | self.assertEqual(len(pool._connection_pools), 3) 52 | 53 | def test_set(self): 54 | cache = self.get_cache() 55 | cache.set('a', 'a') 56 | time.sleep(.2) 57 | for client in self.cache.clients.values(): 58 | key = cache.make_key('a') 59 | self.assertIsNotNone(client.get(key)) 60 | 61 | def test_set_many(self): 62 | cache = self.get_cache() 63 | cache.set_many({'a': 'a', 'b': 'b'}) 64 | for client in self.cache.clients.values(): 65 | self.assertNotIn(None, client.mget([ 66 | cache.make_key('a'), 67 | cache.make_key('b'), 68 | ])) 69 | 70 | def test_incr(self): 71 | cache = self.get_cache() 72 | cache.set('a', 0) 73 | cache.incr('a') 74 | time.sleep(.2) 75 | key = cache.make_key('a') 76 | for client in self.cache.clients.values(): 77 | self.assertEqual(int(client.get(key)), 1) 78 | 79 | def test_delete(self): 80 | cache = self.get_cache() 81 | cache.set('a', 'a') 82 | time.sleep(.2) 83 | self.assertEqual(cache.get('a'), 'a') 84 | cache.delete('a') 85 | time.sleep(.2) 86 | key = cache.make_key('a') 87 | for client in self.cache.clients.values(): 88 | self.assertIsNone(client.get(key)) 89 | 90 | def test_clear(self): 91 | cache = self.get_cache() 92 | cache.set('a', 'a') 93 | time.sleep(.2) 94 | self.assertEqual(cache.get('a'), 'a') 95 | cache.clear() 96 | time.sleep(.2) 97 | for client in self.cache.clients.values(): 98 | self.assertEqual(len(client.keys('*')), 0) 99 | -------------------------------------------------------------------------------- /redis_cache/backends/single.py: -------------------------------------------------------------------------------- 1 | try: 2 | import cPickle as pickle 3 | except ImportError: 4 | import pickle 5 | import random 6 | 7 | from django.core.cache.backends.base import DEFAULT_TIMEOUT 8 | 9 | from redis_cache.backends.base import BaseRedisCache 10 | 11 | 12 | class RedisCache(BaseRedisCache): 13 | 14 | def __init__(self, server, params): 15 | """ 16 | Connect to Redis, and set up cache backend. 
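        Reads are served by a randomly chosen client; writes go to the
        master client when one is configured (see ``get_client`` below).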
17 |         """
18 |         super(RedisCache, self).__init__(server, params)
19 | 
20 |         for server in self.servers:
21 |             client = self.create_client(server)
22 |             self.clients[client.connection_pool.connection_identifier] = client
23 | 
24 |         self.client_list = self.clients.values()
25 |         self.master_client = self.get_master_client()
26 | 
27 |     def get_client(self, key, write=False):
28 |         if write and self.master_client is not None:
29 |             return self.master_client
30 |         return random.choice(list(self.client_list))
31 | 
32 |     ####################
33 |     # Django cache api #
34 |     ####################
35 | 
36 |     def delete_many(self, keys, version=None):
37 |         """Remove multiple keys at once."""
38 |         versioned_keys = self.make_keys(keys, version=version)
39 |         if versioned_keys:
40 |             self._delete_many(self.master_client, versioned_keys)
41 | 
42 |     def clear(self, version=None):
43 |         """Flush cache keys.
44 | 
45 |         If version is specified, all keys belonging to the version's key
46 |         namespace will be deleted. Otherwise, all keys will be deleted.
47 |         """
48 |         if version is None:
49 |             self._clear(self.master_client)
50 |         else:
51 |             self.delete_pattern('*', version=version)
52 | 
53 |     def get_many(self, keys, version=None):
54 |         versioned_keys = self.make_keys(keys, version=version)
55 |         return self._get_many(self.master_client, keys, versioned_keys=versioned_keys)
56 | 
57 |     def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
58 |         """
59 |         Set multiple values in the cache at once from a dict of key/value pairs.
60 | 
61 |         If timeout is given, that timeout will be used for the key; otherwise
62 |         the default cache timeout will be used.
63 |         """
64 |         timeout = self.get_timeout(timeout)
65 | 
66 |         pipeline = self.master_client.pipeline()
67 |         for key, value in data.items():
68 |             value = self.prep_value(value)
69 |             versioned_key = self.make_key(key, version=version)
70 |             self._set(pipeline, versioned_key, value, timeout)
71 |         pipeline.execute()
72 | 
73 |     def incr_version(self, key, delta=1, version=None):
74 |         """
75 |         Adds delta to the cache version for the supplied key. Returns the
76 |         new version.
77 | 
78 |         """
79 |         if version is None:
80 |             version = self.version
81 | 
82 |         old = self.make_key(key, version)
83 |         new = self.make_key(key, version=version + delta)
84 | 
85 |         return self._incr_version(self.master_client, old, new, key, delta, version)
86 | 
87 |     #####################
88 |     # Extra api methods #
89 |     #####################
90 | 
91 |     def delete_pattern(self, pattern, version=None):
92 |         pattern = self.make_key(pattern, version=version)
93 |         self._delete_pattern(self.master_client, pattern)
94 | 
95 |     def reinsert_keys(self):
96 |         """
97 |         Reinsert cache entries using the current pickle protocol version.
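        Useful after changing the ``PICKLE_VERSION`` option: values are read
        back and rewritten by ``_reinsert_keys`` on ``BaseRedisCache``.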
98 | """ 99 | self._reinsert_keys(self.master_client) 100 | -------------------------------------------------------------------------------- /tests/testapp/tests/compressor_tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from django.test import TestCase, override_settings 3 | 4 | from tests.testapp.tests.base_tests import BaseRedisTestCase 5 | 6 | LOCATION = "127.0.0.1:6381" 7 | 8 | 9 | class CompressionTestCase(object): 10 | 11 | def test_compression(self): 12 | key = 'a' 13 | noop_cache = self.get_cache('noop') 14 | string = 10000 * 'a' 15 | 16 | self.cache.set(key, string) 17 | noop_cache.set(key, string) 18 | self.assertEqual(self.cache.get(key), noop_cache.get(key)) 19 | self.assertNotEqual(self.cache, noop_cache) 20 | 21 | noop_client, = list(noop_cache.clients.values()) 22 | default_client, = list(self.cache.clients.values()) 23 | versioned_key = self.cache.make_key(key) 24 | self.assertLess( 25 | len(default_client.get(versioned_key)), 26 | len(noop_client.get(versioned_key)), 27 | ) 28 | 29 | 30 | @override_settings( 31 | CACHES={ 32 | 'default': { 33 | 'BACKEND': 'redis_cache.RedisCache', 34 | 'LOCATION': LOCATION, 35 | 'OPTIONS': { 36 | 'DB': 14, 37 | 'PASSWORD': 'yadayada', 38 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 39 | 'PICKLE_VERSION': -1, 40 | 'COMPRESSOR_CLASS': 'redis_cache.compressors.ZLibCompressor', 41 | 'COMPRESSOR_CLASS_KWARGS': { 42 | 'level': 5, 43 | }, 44 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 45 | 'CONNECTION_POOL_CLASS_KWARGS': { 46 | 'max_connections': 2, 47 | }, 48 | }, 49 | }, 50 | 'noop': { 51 | 'BACKEND': 'redis_cache.RedisCache', 52 | 'LOCATION': LOCATION, 53 | 'OPTIONS': { 54 | 'DB': 15, 55 | 'PASSWORD': 'yadayada', 56 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 57 | 'PICKLE_VERSION': -1, 58 | 'COMPRESSOR_CLASS': 'redis_cache.compressors.NoopCompressor', 59 | }, 60 | }, 61 | } 62 | ) 63 | class ZLibTestCase(CompressionTestCase, BaseRedisTestCase, TestCase): 64 | pass 65 | 66 | 67 | @override_settings( 68 | CACHES={ 69 | 'default': { 70 | 'BACKEND': 'redis_cache.RedisCache', 71 | 'LOCATION': LOCATION, 72 | 'OPTIONS': { 73 | 'DB': 14, 74 | 'PASSWORD': 'yadayada', 75 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 76 | 'PICKLE_VERSION': -1, 77 | 'COMPRESSOR_CLASS': 'redis_cache.compressors.BZip2Compressor', 78 | 'COMPRESSOR_CLASS_KWARGS': { 79 | 'compresslevel': 5, 80 | }, 81 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 82 | 'CONNECTION_POOL_CLASS_KWARGS': { 83 | 'max_connections': 2, 84 | }, 85 | }, 86 | }, 87 | 'noop': { 88 | 'BACKEND': 'redis_cache.RedisCache', 89 | 'LOCATION': LOCATION, 90 | 'OPTIONS': { 91 | 'DB': 15, 92 | 'PASSWORD': 'yadayada', 93 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 94 | 'PICKLE_VERSION': -1, 95 | 'COMPRESSOR_CLASS': 'redis_cache.compressors.NoopCompressor', 96 | }, 97 | }, 98 | } 99 | ) 100 | class BZip2TestCase(CompressionTestCase, BaseRedisTestCase, TestCase): 101 | pass 102 | -------------------------------------------------------------------------------- /tests/testapp/tests/socket_tests.py: -------------------------------------------------------------------------------- 1 | # # -*- coding: utf-8 -*- 2 | from collections import Counter 3 | from tests.testapp.tests.base_tests import BaseRedisTestCase 4 | from tests.testapp.tests.multi_server_tests import MultiServerTests 5 | 6 | from django.test import TestCase, override_settings 7 | 8 | 9 | LOCATION = "unix://:yadayada@/tmp/redis0.sock?db=15" 10 | 
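# URL anatomy: unix://:<password>@<socket path>?db=<n>; the password, socket
# path, and db are parsed out by redis_cache.utils.parse_connection_kwargs.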
LOCATIONS = [ 11 | "unix://:yadayada@/tmp/redis0.sock?db=15", 12 | "unix://:yadayada@/tmp/redis1.sock?db=15", 13 | "unix://:yadayada@/tmp/redis2.sock?db=15", 14 | ] 15 | 16 | 17 | class SocketTestCase(BaseRedisTestCase, TestCase): 18 | pass 19 | 20 | 21 | @override_settings( 22 | CACHES={ 23 | 'default': { 24 | 'BACKEND': 'redis_cache.RedisCache', 25 | 'LOCATION': LOCATION, 26 | 'OPTIONS': { 27 | 'DB': 15, 28 | 'PASSWORD': 'yadayada', 29 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 30 | 'PICKLE_VERSION': 2, 31 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 32 | 'CONNECTION_POOL_CLASS_KWARGS': { 33 | 'max_connections': 2, 34 | } 35 | }, 36 | }, 37 | } 38 | ) 39 | class SingleHiredisTestCase(SocketTestCase): 40 | pass 41 | 42 | 43 | @override_settings( 44 | CACHES={ 45 | 'default': { 46 | 'BACKEND': 'redis_cache.RedisCache', 47 | 'LOCATION': LOCATION, 48 | 'OPTIONS': { 49 | 'DB': 15, 50 | 'PASSWORD': 'yadayada', 51 | 'PARSER_CLASS': 'redis.connection.PythonParser', 52 | 'PICKLE_VERSION': 2, 53 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 54 | 'CONNECTION_POOL_CLASS_KWARGS': { 55 | 'max_connections': 2, 56 | } 57 | }, 58 | }, 59 | } 60 | ) 61 | class SinglePythonParserTestCase(SocketTestCase): 62 | pass 63 | 64 | 65 | @override_settings( 66 | CACHES={ 67 | 'default': { 68 | 'BACKEND': 'redis_cache.ShardedRedisCache', 69 | 'LOCATION': LOCATIONS, 70 | 'OPTIONS': { 71 | 'DB': 15, 72 | 'PASSWORD': 'yadayada', 73 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 74 | 'PICKLE_VERSION': 2, 75 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 76 | 'CONNECTION_POOL_CLASS_KWARGS': { 77 | 'max_connections': 2, 78 | } 79 | }, 80 | }, 81 | } 82 | ) 83 | class MultipleHiredisTestCase(MultiServerTests, SocketTestCase): 84 | 85 | def test_equal_number_of_nodes(self): 86 | counter = Counter( 87 | [node._node[3] for node in self.cache.sharder._nodes] 88 | ) 89 | self.assertEqual(counter, { 90 | '/tmp/redis0.sock': 16, 91 | '/tmp/redis1.sock': 16, 92 | '/tmp/redis2.sock': 16, 93 | }) 94 | 95 | 96 | @override_settings( 97 | CACHES={ 98 | 'default': { 99 | 'BACKEND': 'redis_cache.ShardedRedisCache', 100 | 'LOCATION': LOCATIONS, 101 | 'OPTIONS': { 102 | 'DB': 15, 103 | 'PASSWORD': 'yadayada', 104 | 'PARSER_CLASS': 'redis.connection.PythonParser', 105 | 'PICKLE_VERSION': 2, 106 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 107 | 'CONNECTION_POOL_CLASS_KWARGS': { 108 | 'max_connections': 2, 109 | } 110 | }, 111 | }, 112 | } 113 | ) 114 | class MultiplePythonParserTestCase(MultiServerTests, SocketTestCase): 115 | pass 116 | -------------------------------------------------------------------------------- /tests/testapp/tests/tcp_tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from tests.testapp.tests.base_tests import BaseRedisTestCase 3 | from tests.testapp.tests.multi_server_tests import MultiServerTests 4 | from django.test import TestCase, override_settings 5 | 6 | from redis_cache.cache import ImproperlyConfigured 7 | from redis.connection import UnixDomainSocketConnection 8 | 9 | 10 | LOCATION = "127.0.0.1:6381" 11 | LOCATIONS = [ 12 | '127.0.0.1:6381', 13 | '127.0.0.1:6382', 14 | '127.0.0.1:6383', 15 | ] 16 | 17 | 18 | class TCPTestCase(BaseRedisTestCase, TestCase): 19 | 20 | def test_default_initialization(self): 21 | self.reset_pool() 22 | self.cache = self.get_cache() 23 | self.assertIn( 24 | ('127.0.0.1', 6381, 15, None), 25 | self.cache.clients, 26 | ) 27 | client = self.cache.clients[('127.0.0.1', 6381, 15, 
None)] 28 | connection_class = client.connection_pool.connection_class 29 | if connection_class is not UnixDomainSocketConnection: 30 | self.assertEqual(client.connection_pool.connection_kwargs['host'], '127.0.0.1') 31 | self.assertEqual(client.connection_pool.connection_kwargs['port'], 6381) 32 | self._skip_tearDown = True 33 | self.assertEqual(client.connection_pool.connection_kwargs['db'], 15) 34 | 35 | 36 | @override_settings( 37 | CACHES={ 38 | 'default': { 39 | 'BACKEND': 'redis_cache.RedisCache', 40 | 'LOCATION': LOCATION, 41 | 'OPTIONS': { 42 | 'DB': 15, 43 | 'PASSWORD': 'yadayada', 44 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 45 | 'PICKLE_VERSION': 2, 46 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 47 | 'CONNECTION_POOL_CLASS_KWARGS': { 48 | 'max_connections': 2, 49 | } 50 | }, 51 | }, 52 | } 53 | ) 54 | class SingleHiredisTestCase(TCPTestCase): 55 | pass 56 | 57 | 58 | @override_settings( 59 | CACHES={ 60 | 'default': { 61 | 'BACKEND': 'redis_cache.RedisCache', 62 | 'LOCATION': LOCATION, 63 | 'OPTIONS': { 64 | 'DB': 15, 65 | 'PASSWORD': 'yadayada', 66 | 'PARSER_CLASS': 'redis.connection.PythonParser', 67 | 'PICKLE_VERSION': 2, 68 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 69 | 'CONNECTION_POOL_CLASS_KWARGS': { 70 | 'max_connections': 2, 71 | } 72 | }, 73 | }, 74 | } 75 | ) 76 | class SinglePythonParserTestCase(TCPTestCase): 77 | pass 78 | 79 | 80 | @override_settings( 81 | CACHES={ 82 | 'default': { 83 | 'BACKEND': 'redis_cache.ShardedRedisCache', 84 | 'LOCATION': LOCATIONS, 85 | 'OPTIONS': { 86 | 'DB': 15, 87 | 'PASSWORD': 'yadayada', 88 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 89 | 'PICKLE_VERSION': 2, 90 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 91 | 'CONNECTION_POOL_CLASS_KWARGS': { 92 | 'max_connections': 2, 93 | } 94 | }, 95 | }, 96 | } 97 | ) 98 | class MultipleHiredisTestCase(MultiServerTests, TCPTestCase): 99 | pass 100 | 101 | 102 | @override_settings( 103 | CACHES={ 104 | 'default': { 105 | 'BACKEND': 'redis_cache.ShardedRedisCache', 106 | 'LOCATION': LOCATIONS, 107 | 'OPTIONS': { 108 | 'DB': 15, 109 | 'PASSWORD': 'yadayada', 110 | 'PARSER_CLASS': 'redis.connection.PythonParser', 111 | 'PICKLE_VERSION': 2, 112 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 113 | 'CONNECTION_POOL_CLASS_KWARGS': { 114 | 'max_connections': 2, 115 | } 116 | }, 117 | }, 118 | } 119 | ) 120 | class MultiplePythonParserTestCase(MultiServerTests, TCPTestCase): 121 | pass 122 | -------------------------------------------------------------------------------- /redis_cache/backends/multiple.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | from django.core.cache.backends.base import DEFAULT_TIMEOUT 4 | 5 | from redis_cache.backends.base import BaseRedisCache 6 | from redis_cache.sharder import HashRing 7 | 8 | 9 | class ShardedRedisCache(BaseRedisCache): 10 | 11 | def __init__(self, server, params): 12 | super(ShardedRedisCache, self).__init__(server, params) 13 | self.sharder = HashRing() 14 | 15 | for server in self.servers: 16 | client = self.create_client(server) 17 | self.clients[client.connection_pool.connection_identifier] = client 18 | self.sharder.add(client.connection_pool.connection_identifier) 19 | 20 | self.client_list = self.clients.values() 21 | 22 | def get_client(self, key, write=False): 23 | node = self.sharder.get_node(key) 24 | return self.clients[node] 25 | 26 | def shard(self, keys, write=False, version=None): 27 | """ 28 | Returns a dict of keys 
grouped by the client that owns them, as determined by the hash ring.
29 |         """
30 |         clients = defaultdict(list)
31 |         for key in keys:
32 |             versioned_key = self.make_key(key, version=version)
33 |             clients[self.get_client(versioned_key, write)].append(versioned_key)
34 |         return clients
35 | 
36 |     ####################
37 |     # Django cache api #
38 |     ####################
39 | 
40 |     def delete_many(self, keys, version=None):
41 |         """
42 |         Remove multiple keys at once.
43 |         """
44 |         clients = self.shard(keys, write=True, version=version)
45 |         for client, keys in clients.items():
46 |             self._delete_many(client, keys)
47 | 
48 |     def clear(self, version=None):
49 |         """
50 |         Flush cache keys.
51 | 
52 |         If version is specified, all keys belonging to the version's key
53 |         namespace will be deleted. Otherwise, all keys will be deleted.
54 |         """
55 |         if version is None:
56 |             for client in self.clients.values():
57 |                 self._clear(client)
58 |         else:
59 |             self.delete_pattern('*', version=version)
60 | 
61 |     def get_many(self, keys, version=None):
62 |         data = {}
63 |         versioned_to_original = {self.make_key(key, version=version): key for key in keys}
64 |         for client, versioned_keys in self.shard(keys, version=version).items():
65 |             original_keys = [versioned_to_original[key] for key in versioned_keys]
66 |             data.update(
67 |                 self._get_many(client, original_keys, versioned_keys=versioned_keys)
68 |             )
69 |         return data
70 | 
71 |     def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
72 |         """
73 |         Set multiple values in the cache at once from a dict of key/value pairs.
74 | 
75 |         If timeout is given, that timeout will be used for the key; otherwise
76 |         the default cache timeout will be used.
77 |         """
78 |         timeout = self.get_timeout(timeout)
79 |         versioned_key_to_key = {self.make_key(key, version=version): key for key in data.keys()}
80 |         clients = self.shard(versioned_key_to_key.values(), write=True, version=version)
81 | 
82 |         for client, versioned_keys in clients.items():
83 |             pipeline = client.pipeline()
84 |             for versioned_key in versioned_keys:
85 |                 value = self.prep_value(data[versioned_key_to_key[versioned_key]])
86 |                 self._set(pipeline, versioned_key, value, timeout)
87 |             pipeline.execute()
88 | 
89 |     def incr_version(self, key, delta=1, version=None):
90 |         """
91 |         Adds delta to the cache version for the supplied key. Returns the
92 |         new version.
93 | 
94 |         """
95 |         if version is None:
96 |             version = self.version
97 | 
98 |         client = self.get_client(key, write=True)
99 |         old = self.make_key(key, version=version)
100 |         new = self.make_key(key, version=version + delta)
101 | 
102 |         return self._incr_version(client, old, new, key, delta, version)
103 | 
104 |     #####################
105 |     # Extra api methods #
106 |     #####################
107 | 
108 |     def delete_pattern(self, pattern, version=None):
109 |         pattern = self.make_key(pattern, version=version)
110 |         for client in self.clients.values():
111 |             self._delete_pattern(client, pattern)
112 | 
113 |     def reinsert_keys(self):
114 |         """
115 |         Reinsert cache entries using the current pickle protocol version.
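        Unlike the single-server backend, this walks every shard, rewriting
        each node's keys in turn.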
116 | """ 117 | for client in self.clients.values(): 118 | self._reinsert_keys(client) 119 | -------------------------------------------------------------------------------- /tests/testapp/tests/serializers_tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.test import TestCase, override_settings 5 | 6 | from tests.testapp.tests.base_tests import SetupMixin 7 | 8 | 9 | LOCATION = "127.0.0.1:6381" 10 | 11 | 12 | # functions/classes for complex data type tests 13 | def f(): 14 | return 42 15 | 16 | 17 | class C: 18 | def m(n): 19 | return 24 20 | 21 | 22 | class BaseSerializerTestCase(SetupMixin, TestCase): 23 | converts_tuple_to_list = False 24 | serializes_objects = True 25 | 26 | def test_string(self): 27 | self.cache.set('a', 'a') 28 | self.assertEqual(self.cache.get('a'), 'a') 29 | 30 | def test_unicode(self): 31 | self.cache.set('Iñtërnâtiônàlizætiøn', 'Iñtërnâtiônàlizætiøn2') 32 | self.assertEqual( 33 | self.cache.get('Iñtërnâtiônàlizætiøn'), 34 | 'Iñtërnâtiônàlizætiøn2' 35 | ) 36 | 37 | def test_number(self): 38 | self.cache.set('a', 10) 39 | self.assertEqual(self.cache.get('a'), 10) 40 | 41 | def test_dictionary(self): 42 | stuff = { 43 | 'string': 'this is a string', 44 | 'int': 42, 45 | 'list': [1, 2, 3, 4], 46 | 'tuple': (1, 2, 3, 4), 47 | 'dict': {'A': 1, 'B': 2}, 48 | } 49 | if self.serializes_objects: 50 | stuff.update({ 51 | 'function': f, 52 | 'class': C, 53 | }) 54 | 55 | self.cache.set('a', stuff) 56 | stuff = self.cache.get('a') 57 | _tuple = [1, 2, 3, 4] if self.converts_tuple_to_list else (1, 2, 3, 4) 58 | data = { 59 | 'string': 'this is a string', 60 | 'int': 42, 61 | 'list': [1, 2, 3, 4], 62 | 'tuple': _tuple, 63 | 'dict': {'A': 1, 'B': 2}, 64 | } 65 | if self.serializes_objects: 66 | data.update({ 67 | 'function': f, 68 | 'class': C, 69 | }) 70 | self.assertEqual(stuff, data) 71 | 72 | 73 | @override_settings(CACHES={ 74 | 'default': { 75 | 'BACKEND': 'redis_cache.RedisCache', 76 | 'LOCATION': LOCATION, 77 | 'OPTIONS': { 78 | 'DB': 1, 79 | 'PASSWORD': 'yadayada', 80 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 81 | 'PICKLE_VERSION': 1, 82 | 'SERIALIZER_CLASS': 'redis_cache.serializers.PickleSerializer' 83 | }, 84 | }, 85 | }) 86 | class PickleSerializerTestCase(BaseSerializerTestCase): 87 | converts_tuple_to_list = False 88 | serializes_objects = True 89 | 90 | def test_pickle_version(self): 91 | self.assertEqual(self.cache.serializer.pickle_version, 1) 92 | 93 | 94 | @override_settings(CACHES={ 95 | 'default': { 96 | 'BACKEND': 'redis_cache.RedisCache', 97 | 'LOCATION': LOCATION, 98 | 'OPTIONS': { 99 | 'DB': 1, 100 | 'PASSWORD': 'yadayada', 101 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 102 | 'PICKLE_VERSION': 1, 103 | 'SERIALIZER_CLASS': 'redis_cache.serializers.PickleSerializer', 104 | 'SERIALIZER_CLASS_KWARGS': { 105 | 'pickle_version': 2 106 | } 107 | }, 108 | }, 109 | }) 110 | class PickleSerializerTestCase2(BaseSerializerTestCase): 111 | converts_tuple_to_list = False 112 | serializes_objects = True 113 | 114 | def test_serializer_pickle_version_takes_precedence(self): 115 | self.assertEqual(self.cache.serializer.pickle_version, 2) 116 | 117 | 118 | @override_settings(CACHES={ 119 | 'default': { 120 | 'BACKEND': 'redis_cache.RedisCache', 121 | 'LOCATION': LOCATION, 122 | 'OPTIONS': { 123 | 'DB': 1, 124 | 'PASSWORD': 'yadayada', 125 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 126 | 'PICKLE_VERSION': 1, 127 
| 'SERIALIZER_CLASS': 'redis_cache.serializers.JSONSerializer' 128 | }, 129 | }, 130 | }) 131 | class JsonSerializerTestCase(BaseSerializerTestCase): 132 | converts_tuple_to_list = True 133 | serializes_objects = False 134 | 135 | 136 | @override_settings(CACHES={ 137 | 'default': { 138 | 'BACKEND': 'redis_cache.RedisCache', 139 | 'LOCATION': LOCATION, 140 | 'OPTIONS': { 141 | 'DB': 1, 142 | 'PASSWORD': 'yadayada', 143 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 144 | 'PICKLE_VERSION': -1, 145 | 'SERIALIZER_CLASS': 'redis_cache.serializers.MSGPackSerializer' 146 | }, 147 | }, 148 | }) 149 | class MSGPackSerializerTestCase(BaseSerializerTestCase): 150 | converts_tuple_to_list = True 151 | serializes_objects = False 152 | 153 | 154 | @override_settings(CACHES={ 155 | 'default': { 156 | 'BACKEND': 'redis_cache.RedisCache', 157 | 'LOCATION': LOCATION, 158 | 'OPTIONS': { 159 | 'DB': 1, 160 | 'PASSWORD': 'yadayada', 161 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 162 | 'PICKLE_VERSION': -1, 163 | 'SERIALIZER_CLASS': 'redis_cache.serializers.YAMLSerializer' 164 | }, 165 | }, 166 | }) 167 | class YAMLSerializerTestCase(BaseSerializerTestCase): 168 | converts_tuple_to_list = False 169 | serializes_objects = True 170 | -------------------------------------------------------------------------------- /redis_cache/utils.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import warnings 3 | 4 | from django.core.exceptions import ImproperlyConfigured 5 | from urllib.parse import parse_qs 6 | from urllib.parse import urlparse 7 | 8 | 9 | def get_servers(location): 10 | """Returns a list of servers given the server argument passed in from 11 | Django. 12 | """ 13 | if isinstance(location, str): 14 | servers = location.split(',') 15 | elif hasattr(location, '__iter__'): 16 | servers = location 17 | else: 18 | raise ImproperlyConfigured( 19 | '"server" must be an iterable or string' 20 | ) 21 | return servers 22 | 23 | 24 | def import_class(path): 25 | module_name, class_name = path.rsplit('.', 1) 26 | try: 27 | module = importlib.import_module(module_name) 28 | except ImportError: 29 | raise ImproperlyConfigured('Could not find module "%s"' % module_name) 30 | else: 31 | try: 32 | return getattr(module, class_name) 33 | except AttributeError: 34 | raise ImproperlyConfigured('Cannot import "%s"' % class_name) 35 | 36 | 37 | def parse_connection_kwargs(server, db=None, **kwargs): 38 | """ 39 | Return a connection pool configured from the given URL. 40 | 41 | For example:: 42 | 43 | redis://[:password]@localhost:6379/0 44 | rediss://[:password]@localhost:6379/0 45 | unix://[:password]@/path/to/socket.sock?db=0 46 | 47 | Three URL schemes are supported: 48 | redis:// creates a normal TCP socket connection 49 | rediss:// creates a SSL wrapped TCP socket connection 50 | unix:// creates a Unix Domain Socket connection 51 | 52 | There are several ways to specify a database number. The parse function 53 | will return the first specified option: 54 | 1. A ``db`` querystring option, e.g. redis://localhost?db=0 55 | 2. If using the redis:// scheme, the path argument of the url, e.g. 56 | redis://localhost/0 57 | 3. The ``db`` argument to this function. 58 | 59 | If none of these options are specified, db=0 is used. 60 | 61 | Any additional querystring arguments and keyword arguments will be 62 | passed along to the ConnectionPool class's initializer. In the case 63 | of conflicting arguments, querystring arguments always win. 
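    For example, ``redis://localhost/2?db=3`` yields ``db=3``, because the
    querystring value overrides the path component.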
64 | 
65 |     NOTE: taken from `redis.ConnectionPool.from_url` in redis-py
66 |     """
67 |     kwargs['unix_socket_path'] = ''
68 |     if '://' in server:
69 |         url = server
70 |         url_string = url
71 |         url = urlparse(url)
72 |         qs = ''
73 | 
74 |         # in python2.6, custom URL schemes don't recognize querystring values;
75 |         # they're left as part of the url.path.
76 |         if '?' in url.path and not url.query:
77 |             # chop the querystring including the ? off the end of the url
78 |             # and reparse it.
79 |             qs = url.path.split('?', 1)[1]
80 |             url = urlparse(url_string[:-(len(qs) + 1)])
81 |         else:
82 |             qs = url.query
83 | 
84 |         url_options = {}
85 | 
86 |         for name, value in parse_qs(qs).items():
87 |             if value and len(value) > 0:
88 |                 url_options[name] = value[0]
89 | 
90 |         # We only support redis:// and unix:// schemes.
91 |         if url.scheme == 'unix':
92 |             url_options.update({
93 |                 'password': url.password,
94 |                 'unix_socket_path': url.path,
95 |             })
96 | 
97 |         else:
98 |             url_options.update({
99 |                 'host': url.hostname,
100 |                 'port': int(url.port or 6379),
101 |                 'password': url.password,
102 |             })
103 | 
104 |             # If there's a path argument, use it as the db argument if a
105 |             # querystring value wasn't specified
106 |             if 'db' not in url_options and url.path:
107 |                 try:
108 |                     url_options['db'] = int(url.path.replace('/', ''))
109 |                 except (AttributeError, ValueError):
110 |                     pass
111 | 
112 |             if url.scheme == 'rediss':
113 |                 url_options['ssl'] = True
114 | 
115 |         # last shot at the db value
116 |         url_options['db'] = int(url_options.get('db', db or 0))
117 | 
118 |         # update the arguments from the URL values
119 |         kwargs.update(url_options)
120 | 
121 |         # backwards compatibility
122 |         if 'charset' in kwargs:
123 |             warnings.warn(DeprecationWarning(
124 |                 '"charset" is deprecated. Use "encoding" instead'))
125 |             kwargs['encoding'] = kwargs.pop('charset')
126 |         if 'errors' in kwargs:
127 |             warnings.warn(DeprecationWarning(
128 |                 '"errors" is deprecated. Use "encoding_errors" instead'))
129 |             kwargs['encoding_errors'] = kwargs.pop('errors')
130 |     else:
131 |         unix_socket_path = None
132 |         if ':' in server:
133 |             host, port = server.rsplit(':', 1)
134 |             try:
135 |                 port = int(port)
136 |             except (ValueError, TypeError):
137 |                 raise ImproperlyConfigured(
138 |                     "{0} from {1} must be an integer".format(
139 |                         repr(port),
140 |                         server
141 |                     )
142 |                 )
143 |         else:
144 |             host, port = None, None
145 |             unix_socket_path = server
146 | 
147 |         kwargs.update(
148 |             host=host,
149 |             port=port,
150 |             unix_socket_path=unix_socket_path,
151 |             db=db,
152 |         )
153 | 
154 |     return kwargs
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | API Usage
2 | *********
3 | 
4 | Standard Django Cache API
5 | -------------------------
6 | 
7 | .. function:: get(self, key[, default=None]):
8 | 
9 |     Retrieves a value from the cache.
10 | 
11 |     :param key: Location of the value
12 |     :param default: Value to return if key does not exist in cache.
13 |     :rtype: Value that was cached.
14 | 
15 | 
16 | .. function:: add(self, key, value[, timeout=DEFAULT_TIMEOUT]):
17 | 
18 |     Add a value to the cache, failing if the key already exists.
19 | 
20 |     :param key: Location of the value
21 |     :param value: Value to cache
22 |     :param timeout: Number of seconds to hold value in cache.
23 |     :type timeout: Number of seconds or DEFAULT_TIMEOUT
24 |     :rtype: True if the value was added; False if the key already exists.
25 | 
26 | 
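    A quick sketch (assuming this backend is the configured ``default``
    cache):

    .. code:: python

        from django.core.cache import cache

        cache.add('greeting', 'hello')    # True: the key was absent
        cache.add('greeting', 'bonjour')  # False: the key already exists
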
27 | .. function:: set(self, key, value, timeout=DEFAULT_TIMEOUT):
28 | 
29 |     Sets a value in the cache, regardless of whether the key already exists.
30 | 
31 |     If ``timeout == None``, the key is cached indefinitely. Otherwise, timeout defaults to the defined ``DEFAULT_TIMEOUT``.
32 | 
33 |     :param key: Location of the value
34 |     :param value: Value to cache
35 |     :param timeout: Number of seconds to hold value in cache.
36 |     :type timeout: Number of seconds or DEFAULT_TIMEOUT
37 | 
38 | 
39 | .. function:: delete(self, key):
40 | 
41 |     Removes a key from the cache.
42 | 
43 |     :param key: Location of the value
44 | 
45 | 
46 | .. function:: delete_many(self, keys[, version=None]):
47 | 
48 |     Removes multiple keys at once.
49 | 
50 |     :param keys: Keys to remove
51 |     :param version: Version of keys
52 | 
53 | 
54 | .. function:: clear(self[, version=None]):
55 | 
56 |     Flushes the cache. If version is provided, all keys under the version number will be deleted. Otherwise, all keys will be flushed.
57 | 
58 |     :param version: Version of keys
59 | 
60 | 
61 | .. function:: get_many(self, keys[, version=None]):
62 | 
63 |     Retrieves many keys at once.
64 | 
65 |     :param keys: an iterable of keys to retrieve.
66 |     :rtype: Dict of keys mapping to their values.
67 | 
68 | 
69 | .. function:: set_many(self, data[, timeout=None, version=None]):
70 | 
71 |     Set many values in the cache at once from a dict of key/value pairs. This is much more efficient than calling set() multiple times and is atomic.
72 | 
73 |     :param data: dict of key/value pairs to cache.
74 |     :param timeout: Number of seconds to hold value in cache.
75 |     :type timeout: Number of seconds or None
76 | 
77 | 
78 | .. function:: incr(self, key[, delta=1]):
79 | 
80 |     Add delta to value in the cache. If the key does not exist, raise a `ValueError` exception.
81 | 
82 |     :param key: Location of the value
83 |     :param delta: Integer used to increment a value.
84 |     :type delta: Integer
85 | 
86 | .. function:: incr_version(self, key[, delta=1, version=None]):
87 | 
88 |     Adds delta to the cache version for the supplied key. Returns the new version.
89 | 
90 |     :param key: Location of the value
91 |     :param delta: Integer used to increment a value.
92 |     :type delta: Integer
93 |     :param version: Version of key
94 |     :type version: Integer or None
95 | 
96 | .. function:: touch(self, key, timeout):
97 | 
98 |     Updates the timeout on a key.
99 | 
100 |     :param key: Location of the value
101 |     :rtype: bool
102 | 
103 | 
104 | 
105 | Cache Methods Provided by django-redis-cache
106 | --------------------------------------------
107 | 
108 | 
109 | .. function:: has_key(self, key):
110 | 
111 |     Returns True if the key is in the cache and has not expired.
112 | 
113 |     :param key: Location of the value
114 |     :rtype: bool
115 | 
116 | 
117 | .. function:: ttl(self, key):
118 | 
119 |     Returns the 'time-to-live' of a key. If the key is not volatile, i.e. it has no expiration set, then the value returned is None.
120 |     Otherwise, the value is the number of seconds remaining. If the key does not exist, 0 is returned.
121 | 
122 |     :param key: Location of the value
123 |     :rtype: Integer or None
124 | 
125 | 
126 | .. function:: delete_pattern(pattern[, version=None]):
127 | 
128 |     Deletes keys matching the glob-style pattern provided.
129 | 
130 |     :param pattern: Glob-style pattern used to select keys to delete.
131 |     :param version: Version of the keys
132 | 
133 | 
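    A quick sketch of ``delete_pattern`` (again assuming this backend is the
    configured ``default`` cache):

    .. code:: python

        from django.core.cache import cache

        cache.set('session:1', 'a')
        cache.set('session:2', 'b')
        cache.delete_pattern('session:*')  # removes both keys
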
134 | .. function:: get_or_set(self, key, default[, timeout=None, lock_timeout=None, stale_cache_timeout=None]): 135 | 136 | Get a value from the cache or use ``default`` to set it and return it. 137 | 138 | If ``default`` is a callable, call it without arguments and store its return value in the cache instead. 139 | 140 | This implementation is slightly more advanced than Django's. It provides thundering herd 141 | protection, which prevents multiple threads/processes from calling the value-generating 142 | function at the same time. 143 | 144 | :param key: Location of the value 145 | :param default: Used to set the value if key does not exist. 146 | :param timeout: Time in seconds that value at key is considered fresh. 147 | :type timeout: Number of seconds or None 148 | :param lock_timeout: Time in seconds that the lock will stay active and prevent other threads from acquiring the lock. 149 | :type lock_timeout: Number of seconds or None 150 | :param stale_cache_timeout: Time in seconds that the stale cache will remain after the key has expired. If ``None`` is specified, the stale value will remain indefinitely. 151 | :type stale_cache_timeout: Number of seconds or None 152 | 153 | 154 | .. function:: reinsert_keys(self): 155 | 156 | Helper function to reinsert keys using a different pickle protocol version. 157 | 158 | 159 | .. function:: persist(self, key): 160 | 161 | Removes the timeout on a key. 162 | 163 | Equivalent to setting a timeout of None in a set command. 164 | :param key: Location of the value 165 | :rtype: bool 166 | 167 | .. function:: lock(self, key, timeout=None, sleep=0.1, blocking_timeout=None, thread_local=True) 168 | 169 | See docs for `redis-py`_. 170 | 171 | 172 | .. _redis-py: https://redis-py.readthedocs.io/en/latest/_modules/redis/client.html#Redis.lock 173 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext 23 | 24 | help: 25 | @echo "Please use \`make <target>' where <target> is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " applehelp to make an Apple Help Book" 34 | @echo " devhelp to make HTML files and a Devhelp project" 35 | @echo " epub to make an epub" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo " texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | html: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | dirhtml: 60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 61 | @echo 62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 63 | 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | pickle: 70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 71 | @echo 72 | @echo "Build finished; now you can process the pickle files." 73 | 74 | json: 75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 76 | @echo 77 | @echo "Build finished; now you can process the JSON files." 78 | 79 | htmlhelp: 80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 81 | @echo 82 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 83 | ".hhp project file in $(BUILDDIR)/htmlhelp." 84 | 85 | qthelp: 86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 87 | @echo 88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-redis-cache.qhcp" 91 | @echo "To view the help file:" 92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-redis-cache.qhc" 93 | 94 | applehelp: 95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 96 | @echo 97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 98 | @echo "N.B. You won't be able to view it unless you put it in" \ 99 | "~/Library/Documentation/Help or install it in your application" \ 100 | "bundle."
101 | 102 | devhelp: 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/django-redis-cache" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-redis-cache" 109 | @echo "# devhelp" 110 | 111 | epub: 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 115 | 116 | latex: 117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 118 | @echo 119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 121 | "(use \`make latexpdf' here to do that automatically)." 122 | 123 | latexpdf: 124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 125 | @echo "Running LaTeX files through pdflatex..." 126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 128 | 129 | latexpdfja: 130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 131 | @echo "Running LaTeX files through platex and dvipdfmx..." 132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | text: 136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 137 | @echo 138 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 139 | 140 | man: 141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 142 | @echo 143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 144 | 145 | texinfo: 146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 147 | @echo 148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 149 | @echo "Run \`make' in that directory to run these through makeinfo" \ 150 | "(use \`make info' here to do that automatically)." 151 | 152 | info: 153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 154 | @echo "Running Texinfo files through makeinfo..." 155 | make -C $(BUILDDIR)/texinfo info 156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 157 | 158 | gettext: 159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 160 | @echo 161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 162 | 163 | changes: 164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 165 | @echo 166 | @echo "The overview file is in $(BUILDDIR)/changes." 167 | 168 | linkcheck: 169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 170 | @echo 171 | @echo "Link check complete; look for any errors in the above output " \ 172 | "or in $(BUILDDIR)/linkcheck/output.txt." 173 | 174 | doctest: 175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 176 | @echo "Testing of doctests in the sources finished, look at the " \ 177 | "results in $(BUILDDIR)/doctest/output.txt." 178 | 179 | coverage: 180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 181 | @echo "Testing of coverage in the sources finished, look at the " \ 182 | "results in $(BUILDDIR)/coverage/python.txt." 183 | 184 | xml: 185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 186 | @echo 187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 188 | 189 | pseudoxml: 190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 191 | @echo 192 | @echo "Build finished. 
The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 193 | -------------------------------------------------------------------------------- /docs/advanced_configuration.rst: -------------------------------------------------------------------------------- 1 | Advanced Configuration 2 | ********************** 3 | 4 | Example Setting 5 | --------------- 6 | 7 | .. code:: python 8 | 9 | CACHES = { 10 | 'default': { 11 | 'BACKEND': 'redis_cache.RedisCache', 12 | 'LOCATION': '127.0.0.1:6379', 13 | 'OPTIONS': { 14 | 'DB': 1, 15 | 'PASSWORD': 'yadayada', 16 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 17 | 'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool', 18 | 'PICKLE_VERSION': -1, 19 | }, 20 | }, 21 | } 22 | 23 | Pluggable Backends 24 | ------------------ 25 | 26 | django-redis-cache comes with a couple of pluggable backends, one for a unified 27 | keyspace and one for a sharded keyspace. The former can be in the form of a 28 | single redis server or several redis servers set up in a primary/secondary 29 | configuration. The primary is used for writing and the secondaries are 30 | replicated versions of the primary for read access. 31 | 32 | **Default Backend:** ``redis_cache.RedisCache`` 33 | 34 | .. code:: python 35 | 36 | # Unified keyspace 37 | CACHES = { 38 | 'default': { 39 | 'BACKEND': 'redis_cache.RedisCache', 40 | ... 41 | } 42 | } 43 | 44 | # Sharded keyspace 45 | CACHES = { 46 | 'default': { 47 | 'BACKEND': 'redis_cache.ShardedRedisCache', 48 | ... 49 | } 50 | } 51 | 52 | 53 | Location Schemes 54 | ---------------- 55 | 56 | The ``LOCATION`` contains the information for the redis server's location, 57 | which can be the address/port or the path to a unix domain socket. The 58 | location can be a single string or a list of strings. Several schemes for 59 | defining the location can be used. Here is a list of example schemes: 60 | 61 | * ``127.0.0.1:6379`` 62 | 63 | * ``/path/to/socket`` 64 | 65 | * ``redis://[:password]@localhost:6379/0`` 66 | 67 | * ``rediss://[:password]@localhost:6379/0`` 68 | 69 | * ``unix://[:password]@/path/to/socket.sock?db=0`` 70 | 71 | 72 | Database Number 73 | --------------- 74 | 75 | The ``DB`` option will allow key/values to exist in a different keyspace. The 76 | ``DB`` value can either be defined in the ``OPTIONS`` or in the ``LOCATION`` 77 | scheme. Note that in the default configuration of redis, you have only 16 databases and the valid values are from 0 to 15. 78 | 79 | **Default DB:** ``1`` 80 | 81 | .. code:: python 82 | 83 | CACHES = { 84 | 'default': { 85 | 'OPTIONS': { 86 | 'DB': 1, 87 | ... 88 | }, 89 | ... 90 | } 91 | } 92 | 93 | 94 | Password 95 | -------- 96 | 97 | If the redis server is password protected, you can specify the ``PASSWORD`` 98 | option. 99 | 100 | .. code:: python 101 | 102 | CACHES = { 103 | 'default': { 104 | 'OPTIONS': { 105 | 'PASSWORD': 'yadayada', 106 | ... 107 | }, 108 | ... 109 | } 110 | } 111 | 112 | 113 | Master/Slave Setup 114 | ------------------ 115 | 116 | It's possible to have multiple redis servers in a master/slave or 117 | primary/secondary configuration. Here we have the primary server acting as a 118 | read/write server and the secondary servers as read-only. 119 | 120 | .. code:: python 121 | 122 | CACHES = { 123 | 'default': { 124 | 'LOCATION': [ 125 | '127.0.0.1:6379', # Primary 126 | '127.0.0.1:6380', # Secondary 127 | '127.0.0.1:6381', # Secondary 128 | ], 129 | 'OPTIONS': { 130 | 'PASSWORD': 'yadayada', 131 | 'MASTER_CACHE': '127.0.0.1:6379', 132 | ... 133 | }, 134 | ...
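            # Note: reads are distributed across the servers in LOCATION;
            # writes (and cache flushes) go only to the MASTER_CACHE server.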
135 | } 136 | } 137 | 138 | 139 | 140 | 141 | Pluggable Parser Classes 142 | ------------------------ 143 | 144 | `redis-py`_ comes with two parsers: ``HiredisParser`` and ``PythonParser``. 145 | The former uses the `hiredis`_ library to parse responses from the redis 146 | server, while the latter uses Python. Hiredis is written in C, so it 147 | is much faster than the Python parser, but it requires installing an extra 148 | library. 149 | 150 | **Default Parser:** ``redis.connection.PythonParser`` 151 | 152 | The default parser is the Python parser because there is no other dependency, 153 | but I would recommend using `hiredis`_: 154 | 155 | ``pip install hiredis`` 156 | 157 | 158 | .. code:: python 159 | 160 | CACHES = { 161 | 'default': { 162 | 'OPTIONS': { 163 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 164 | ... 165 | }, 166 | ... 167 | } 168 | } 169 | 170 | 171 | Pickle Version 172 | -------------- 173 | 174 | When using the pickle serializer, you can use ``PICKLE_VERSION`` to specify 175 | the protocol version of pickle you want to use to serialize your python objects. 176 | 177 | **Default Pickle Version:** `-1` 178 | 179 | The default pickle protocol is -1, which selects the highest protocol available. 180 | Pin this value to a specific protocol number, since ``-1`` can mean 181 | different protocols across versions of Python. 182 | 183 | .. code:: python 184 | 185 | CACHES = { 186 | 'default': { 187 | 'OPTIONS': { 188 | 'PICKLE_VERSION': 2, 189 | ... 190 | }, 191 | ... 192 | }, 193 | } 194 | 195 | 196 | Socket Timeout and Socket Connect Timeout 197 | ----------------------------------------- 198 | 199 | When working with a TCP connection, it may be beneficial to set the 200 | ``SOCKET_TIMEOUT`` and ``SOCKET_CONNECT_TIMEOUT`` options to prevent your 201 | app from blocking indefinitely. 202 | 203 | If provided, a blocking read or write on an established connection will time 204 | out after ``SOCKET_TIMEOUT`` seconds. 205 | 206 | Similarly, the connection attempt will time out if it takes more than 207 | ``SOCKET_CONNECT_TIMEOUT`` seconds to establish the connection. 208 | 209 | **Default Socket Timeout:** ``None`` 210 | 211 | **Default Socket Connect Timeout:** ``None`` 212 | 213 | .. code:: python 214 | 215 | CACHES = { 216 | 'default': { 217 | 'OPTIONS': { 218 | 'SOCKET_TIMEOUT': 5, 219 | 'SOCKET_CONNECT_TIMEOUT': 5, 220 | ... 221 | } 222 | ... 223 | } 224 | } 225 | 226 | 227 | Connection Pool 228 | --------------- 229 | 230 | There is overhead associated with creating connections to a redis server. 231 | Therefore, it's beneficial to create a pool of connections that the cache can 232 | reuse to send or retrieve data from the redis server. 233 | 234 | ``CONNECTION_POOL_CLASS`` can be used to specify a class to use for the 235 | connection pool. In addition, you can provide custom keyword arguments using 236 | the ``CONNECTION_POOL_CLASS_KWARGS`` option that will be passed into the class 237 | when it's initialized. 238 | 239 | **Default Connection Pool:** ``redis.ConnectionPool`` 240 | 241 | .. code:: python 242 | 243 | CACHES = { 244 | 'default': { 245 | 'OPTIONS': { 246 | 'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool', 247 | 'CONNECTION_POOL_CLASS_KWARGS': { 248 | 'max_connections': 50, 249 | 'timeout': 20, 250 | ... 251 | }, 252 | ... 253 | }, 254 | ... 255 | } 256 | } 257 | 258 | 259 | Pluggable Serializers 260 | --------------------- 261 | 262 | You can use ``SERIALIZER_CLASS`` to specify a class that will 263 | serialize/deserialize data. In addition, you can provide custom keyword 264 | arguments using the ``SERIALIZER_CLASS_KWARGS`` option that will be passed into 265 | the class when it's initialized. 266 | 267 | The default serializer in django-redis-cache is the pickle serializer. It can 268 | serialize most python objects, but is slow and not always safe. Also included 269 | are serializers for JSON, msgpack, and YAML. These serializers cannot handle 270 | arbitrary Python objects, so they are limited to primitive data types. 271 | 272 | 273 | **Default Serializer:** ``redis_cache.serializers.PickleSerializer`` 274 | 275 | .. code:: python 276 | 277 | CACHES = { 278 | 'default': { 279 | 'OPTIONS': { 280 | 'SERIALIZER_CLASS': 'redis_cache.serializers.PickleSerializer', 281 | 'SERIALIZER_CLASS_KWARGS': { 282 | 'pickle_version': -1 283 | }, 284 | ... 285 | }, 286 | ... 287 | } 288 | } 289 | 290 |
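A custom serializer can be plugged in the same way. Below is a minimal sketch
(the module path and class name are hypothetical); the backend only ever calls
``serialize``/``deserialize`` (see ``redis_cache/backends/base.py``), and
``SERIALIZER_CLASS_KWARGS`` is passed to ``__init__``:

.. code:: python

    # myapp/serializers.py -- hypothetical module; point SERIALIZER_CLASS here.
    import json

    class JSONSerializer(object):
        def __init__(self, **kwargs):
            # Receives SERIALIZER_CLASS_KWARGS from the cache OPTIONS.
            self.kwargs = kwargs

        def serialize(self, value):
            # Python value -> bytes stored in redis.
            return json.dumps(value).encode('utf-8')

        def deserialize(self, data):
            # Bytes from redis -> Python value.
            return json.loads(data.decode('utf-8'))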
291 | Pluggable Compressors 292 | --------------------- 293 | 294 | You can use ``COMPRESSOR_CLASS`` to specify a class that will 295 | compress/decompress data. Use the ``COMPRESSOR_CLASS_KWARGS`` option to 296 | initialize the compressor class. 297 | 298 | The default compressor is ``NoopCompressor``, which does not compress your data. 299 | However, if you want to compress your data, you can use one of the included 300 | compressor classes: 301 | 302 | 303 | **Default Compressor:** ``redis_cache.compressors.NoopCompressor`` 304 | 305 | .. code:: python 306 | 307 | # zlib compressor 308 | CACHES = { 309 | 'default': { 310 | 'OPTIONS': { 311 | 'COMPRESSOR_CLASS': 'redis_cache.compressors.ZLibCompressor', 312 | 'COMPRESSOR_CLASS_KWARGS': { 313 | 'level': 5, # 0 - 9; 0 - no compression; 1 - fastest, biggest; 9 - slowest, smallest 314 | }, 315 | ... 316 | }, 317 | ... 318 | } 319 | } 320 | 321 | # bzip2 compressor 322 | CACHES = { 323 | 'default': { 324 | 'OPTIONS': { 325 | 'COMPRESSOR_CLASS': 'redis_cache.compressors.BZip2Compressor', 326 | 'COMPRESSOR_CLASS_KWARGS': { 327 | 'compresslevel': 5, # 1 - 9; 1 - fastest, biggest; 9 - slowest, smallest 328 | }, 329 | ... 330 | }, 331 | ... 332 | } 333 | } 334 | 335 | 336 | .. _redis-py: http://github.com/andymccurdy/redis-py/ 337 | .. _hiredis: https://pypi.python.org/pypi/hiredis/ 338 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # django-redis-cache documentation build configuration file, created by 5 | # sphinx-quickstart on Mon Jul 20 10:08:24 2015. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | import shlex 19 | 20 | # If extensions (or modules to document with autodoc) are in another directory, 21 | # add these directories to sys.path here. If the directory is relative to the 22 | # documentation root, use os.path.abspath to make it absolute, like shown here. 23 | #sys.path.insert(0, os.path.abspath('.')) 24 | 25 | # -- General configuration ------------------------------------------------ 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 28 | #needs_sphinx = '1.0' 29 | 30 | # Add any Sphinx extension module names here, as strings.
They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = [] 34 | 35 | # Add any paths that contain templates here, relative to this directory. 36 | templates_path = ['_templates'] 37 | 38 | # The suffix(es) of source filenames. 39 | # You can specify multiple suffix as a list of string: 40 | # source_suffix = ['.rst', '.md'] 41 | source_suffix = '.rst' 42 | 43 | # The encoding of source files. 44 | #source_encoding = 'utf-8-sig' 45 | 46 | # The master toctree document. 47 | master_doc = 'index' 48 | 49 | # General information about the project. 50 | project = 'django-redis-cache' 51 | copyright = '2015, Sean Bleier' 52 | author = 'Sean Bleier' 53 | 54 | # The version info for the project you're documenting, acts as replacement for 55 | # |version| and |release|, also used in various other places throughout the 56 | # built documents. 57 | # 58 | # The short X.Y version. 59 | version = '1.5.2' 60 | # The full version, including alpha/beta/rc tags. 61 | release = '1.5.2' 62 | 63 | # The language for content autogenerated by Sphinx. Refer to documentation 64 | # for a list of supported languages. 65 | # 66 | # This is also used if you do content translation via gettext catalogs. 67 | # Usually you set "language" from the command line for these cases. 68 | language = None 69 | 70 | # There are two options for replacing |today|: either, you set today to some 71 | # non-false value, then it is used: 72 | #today = '' 73 | # Else, today_fmt is used as the format for a strftime call. 74 | #today_fmt = '%B %d, %Y' 75 | 76 | # List of patterns, relative to source directory, that match files and 77 | # directories to ignore when looking for source files. 78 | exclude_patterns = ['_build'] 79 | 80 | # The reST default role (used for this markup: `text`) to use for all 81 | # documents. 82 | #default_role = None 83 | 84 | # If true, '()' will be appended to :func: etc. cross-reference text. 85 | #add_function_parentheses = True 86 | 87 | # If true, the current module name will be prepended to all description 88 | # unit titles (such as .. function::). 89 | #add_module_names = True 90 | 91 | # If true, sectionauthor and moduleauthor directives will be shown in the 92 | # output. They are ignored by default. 93 | #show_authors = False 94 | 95 | # The name of the Pygments (syntax highlighting) style to use. 96 | pygments_style = 'sphinx' 97 | 98 | # A list of ignored prefixes for module index sorting. 99 | #modindex_common_prefix = [] 100 | 101 | # If true, keep warnings as "system message" paragraphs in the built documents. 102 | #keep_warnings = False 103 | 104 | # If true, `todo` and `todoList` produce output, else they produce nothing. 105 | todo_include_todos = False 106 | 107 | 108 | # -- Options for HTML output ---------------------------------------------- 109 | 110 | # The theme to use for HTML and HTML Help pages. See the documentation for 111 | # a list of builtin themes. 112 | html_theme = 'alabaster' 113 | 114 | # Theme options are theme-specific and customize the look and feel of a theme 115 | # further. For a list of options available for each theme, see the 116 | # documentation. 117 | #html_theme_options = {} 118 | 119 | # Add any paths that contain custom themes here, relative to this directory. 120 | #html_theme_path = [] 121 | 122 | # The name for this set of Sphinx documents. If None, it defaults to 123 | # "<project> v<release> documentation". 124 | #html_title = None 125 | 126 | # A shorter title for the navigation bar.
Default is the same as html_title. 127 | #html_short_title = None 128 | 129 | # The name of an image file (relative to this directory) to place at the top 130 | # of the sidebar. 131 | #html_logo = None 132 | 133 | # The name of an image file (within the static path) to use as favicon of the 134 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 135 | # pixels large. 136 | #html_favicon = None 137 | 138 | # Add any paths that contain custom static files (such as style sheets) here, 139 | # relative to this directory. They are copied after the builtin static files, 140 | # so a file named "default.css" will overwrite the builtin "default.css". 141 | html_static_path = ['_static'] 142 | 143 | # Add any extra paths that contain custom files (such as robots.txt or 144 | # .htaccess) here, relative to this directory. These files are copied 145 | # directly to the root of the documentation. 146 | #html_extra_path = [] 147 | 148 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 149 | # using the given strftime format. 150 | #html_last_updated_fmt = '%b %d, %Y' 151 | 152 | # If true, SmartyPants will be used to convert quotes and dashes to 153 | # typographically correct entities. 154 | #html_use_smartypants = True 155 | 156 | # Custom sidebar templates, maps document names to template names. 157 | #html_sidebars = {} 158 | 159 | # Additional templates that should be rendered to pages, maps page names to 160 | # template names. 161 | #html_additional_pages = {} 162 | 163 | # If false, no module index is generated. 164 | #html_domain_indices = True 165 | 166 | # If false, no index is generated. 167 | #html_use_index = True 168 | 169 | # If true, the index is split into individual pages for each letter. 170 | #html_split_index = False 171 | 172 | # If true, links to the reST sources are added to the pages. 173 | #html_show_sourcelink = True 174 | 175 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 176 | #html_show_sphinx = True 177 | 178 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 179 | #html_show_copyright = True 180 | 181 | # If true, an OpenSearch description file will be output, and all pages will 182 | # contain a <link> tag referring to it. The value of this option must be the 183 | # base URL from which the finished HTML is served. 184 | #html_use_opensearch = '' 185 | 186 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 187 | #html_file_suffix = None 188 | 189 | # Language to be used for generating the HTML full-text search index. 190 | # Sphinx supports the following languages: 191 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 192 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' 193 | #html_search_language = 'en' 194 | 195 | # A dictionary with options for the search language support, empty by default. 196 | # Now only 'ja' uses this config value 197 | #html_search_options = {'type': 'default'} 198 | 199 | # The name of a javascript file (relative to the configuration directory) that 200 | # implements a search results scorer. If empty, the default will be used. 201 | #html_search_scorer = 'scorer.js' 202 | 203 | # Output file base name for HTML help builder. 204 | htmlhelp_basename = 'django-redis-cachedoc' 205 | 206 | # -- Options for LaTeX output --------------------------------------------- 207 | 208 | latex_elements = { 209 | # The paper size ('letterpaper' or 'a4paper').
210 | #'papersize': 'letterpaper', 211 | 212 | # The font size ('10pt', '11pt' or '12pt'). 213 | #'pointsize': '10pt', 214 | 215 | # Additional stuff for the LaTeX preamble. 216 | #'preamble': '', 217 | 218 | # Latex figure (float) alignment 219 | #'figure_align': 'htbp', 220 | } 221 | 222 | # Grouping the document tree into LaTeX files. List of tuples 223 | # (source start file, target name, title, 224 | # author, documentclass [howto, manual, or own class]). 225 | latex_documents = [ 226 | (master_doc, 'django-redis-cache.tex', 'django-redis-cache Documentation', 227 | 'Sean Bleier', 'manual'), 228 | ] 229 | 230 | # The name of an image file (relative to this directory) to place at the top of 231 | # the title page. 232 | #latex_logo = None 233 | 234 | # For "manual" documents, if this is true, then toplevel headings are parts, 235 | # not chapters. 236 | #latex_use_parts = False 237 | 238 | # If true, show page references after internal links. 239 | #latex_show_pagerefs = False 240 | 241 | # If true, show URL addresses after external links. 242 | #latex_show_urls = False 243 | 244 | # Documents to append as an appendix to all manuals. 245 | #latex_appendices = [] 246 | 247 | # If false, no module index is generated. 248 | #latex_domain_indices = True 249 | 250 | 251 | # -- Options for manual page output --------------------------------------- 252 | 253 | # One entry per manual page. List of tuples 254 | # (source start file, name, description, authors, manual section). 255 | man_pages = [ 256 | (master_doc, 'django-redis-cache', 'django-redis-cache Documentation', 257 | [author], 1) 258 | ] 259 | 260 | # If true, show URL addresses after external links. 261 | #man_show_urls = False 262 | 263 | 264 | # -- Options for Texinfo output ------------------------------------------- 265 | 266 | # Grouping the document tree into Texinfo files. List of tuples 267 | # (source start file, target name, title, author, 268 | # dir menu entry, description, category) 269 | texinfo_documents = [ 270 | (master_doc, 'django-redis-cache', 'django-redis-cache Documentation', 271 | author, 'django-redis-cache', 'One line description of project.', 272 | 'Miscellaneous'), 273 | ] 274 | 275 | # Documents to append as an appendix to all manuals. 276 | #texinfo_appendices = [] 277 | 278 | # If false, no module index is generated. 279 | #texinfo_domain_indices = True 280 | 281 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 282 | #texinfo_show_urls = 'footnote' 283 | 284 | # If true, do not generate a @detailmenu in the "Top" node's menu. 285 | #texinfo_no_detailmenu = False 286 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ========================== 2 | Redis Django Cache Backend 3 | ========================== 4 | 5 | .. image:: https://pepy.tech/badge/django-redis-cache 6 | :target: https://pepy.tech/project/django-redis-cache 7 | :alt: Downloads 8 | 9 | .. image:: https://img.shields.io/pypi/v/django-redis-cache.svg 10 | :target: https://pypi.python.org/pypi/django-redis-cache/ 11 | :alt: Latest Version 12 | 13 | .. image:: https://img.shields.io/travis/sebleier/django-redis-cache.svg 14 | :target: https://travis-ci.org/sebleier/django-redis-cache 15 | :alt: Travis-ci Build 16 | 17 | A Redis cache backend for Django 18 | 19 | Docs can be found at http://django-redis-cache.readthedocs.org/en/latest/. 
20 | 21 | Changelog 22 | ========= 23 | 24 | 3.0.1 25 | ----- 26 | 27 | * Confirms support for Django 3.2 (no code changes required). 28 | 29 | 3.0.0 30 | ----- 31 | 32 | * Adds support for Python 3.8 33 | * Drops support for Python 2.X and Python 3.5 34 | * Drops support for Django < 3.0 35 | 36 | 37 | 2.1.2 38 | ----- 39 | 40 | * Confirms support for Django 3.1 (no code changes required). 41 | 42 | 2.1.1 43 | ----- 44 | 45 | * Fixes URL scheme for `rediss://`. 46 | 47 | 2.1.0 48 | ----- 49 | 50 | * Adds support for Django 3.0. 51 | 52 | 2.0.0 53 | ----- 54 | 55 | * Adds support for redis-py >= 3.0. 56 | * Drops support for Redis 2.6. 57 | * Drops support for Python 3.4. 58 | * Removes custom ``expire`` method in lieu of Django's ``touch``. 59 | * Removes ``CacheKey`` in favor of string literals. 60 | * Adds testing for Django 2.2 and Python 3.7 (no code changes required). 61 | 62 | 63 | 1.8.0 64 | ----- 65 | 66 | * Confirms support for Django 1.11, 2.0, and 2.1 (no code changes required). 67 | * Drops support for Django < 1.11. 68 | 69 | 1.7.1 70 | ----- 71 | 72 | * Confirms support for Django 1.9 and 1.10. 73 | 74 | 75 | 1.7.0 76 | ----- 77 | 78 | * Drops support for Django < 1.8 and Python 3.2. 79 | 80 | 1.6.4 81 | ----- 82 | 83 | * Adds a default timeout to ``set_many``. 84 | 85 | 1.6.3 86 | ----- 87 | 88 | * Fixes ``get_many`` and ``set_many`` to work with empty parameters. 89 | 90 | 1.6.2 91 | ----- 92 | 93 | * Fixes ``set_many`` to set cache key version. 94 | 95 | 1.6.1 96 | ----- 97 | 98 | * Allows ``delete_many`` to fail silently with an empty list. 99 | 100 | 1.6.0 101 | ----- 102 | 103 | * Adds dummy cache. 104 | 105 | 1.5.5 106 | ----- 107 | 108 | * Cleans up ``get_or_set``. 109 | 110 | 1.5.4 111 | ----- 112 | 113 | * Updates importlib import statement for better Django 1.9 compatibility. 114 | 115 | 1.5.3 116 | ----- 117 | 118 | * Adds initial documentation. 119 | * Updates function signatures to use ``DEFAULT_TIMEOUT``. 120 | * Fixes issue with redis urls and unix_socket_path key error. 121 | 122 | 1.5.2 123 | ----- 124 | 125 | * Adds ``SOCKET_CONNECT_TIMEOUT`` option. 126 | 127 | 1.5.1 128 | ----- 129 | 130 | * Refactors class importing. 131 | 132 | 1.5.0 133 | ----- 134 | 135 | * Adds ability to compress/decompress cache values using pluggable compressors 136 | including zlib, bzip2, or a custom implementation. 137 | 138 | 1.4.0 139 | ----- 140 | 141 | * Adds support for providing a socket timeout on the redis-py client. 142 | 143 | 1.3.0 144 | ----- 145 | 146 | * Adds support for pluggable serializers including pickle(default), json, 147 | msgpack, and yaml. 148 | 149 | 1.2.0 150 | ----- 151 | 152 | * Deprecate support for Python 2.6. The cache should still work, but tests 153 | will fail and compatibility will not be guaranteed going forward. 154 | 155 | **Backward incompatibilities:** 156 | 157 | * The ``HashRing`` behavior has changed to maintain a proper keyspace balance. 158 | This will lead to some cache misses, so be aware. 159 | 160 | * Now requires `redis-py`_ >= 2.10.3 161 | 162 | 1.0.0 163 | ----- 164 | 165 | * Deprecate support for django < 1.3 and redis < 2.4. If you need support for those versions, 166 | pin django-redis-cache to a version less than 1.0, i.e. pip install django-redis-cache<1.0 167 | * Application level sharding when a list of locations is provided in the settings. 168 | * Delete keys using wildcard syntax. 169 | * Clear cache using version to delete only keys under that namespace. 170 | * Ability to select pickle protocol version. 
171 | * Support for Master-Slave setup 172 | * Thundering herd protection 173 | * Add expiration to key using `expire` command. 174 | * Add persistence to key using `persist` command. 175 | 176 | 177 | 0.13.0 178 | ------ 179 | 180 | * Adds custom `has_key` implementation that uses Redis's `exists` command. 181 | This will speed `has_key` up drastically if the key under question is 182 | extremely large. 183 | 184 | 0.12.0 185 | ------ 186 | 187 | * Keys can now be kept alive indefinitely by setting the timeout to None, 188 | e.g. `cache.set('key', 'value', timeout=None)` 189 | * Adds `ttl` method to the cache. `cache.ttl(key)` will return the number of 190 | seconds before it expires or None if the key is not volatile. 191 | 192 | 0.11.0 193 | ------ 194 | 195 | * Adds support for specifying the connection pool class. 196 | * Adds ability to set the max connections for the connection pool. 197 | 198 | 199 | 0.10.0 200 | ------ 201 | 202 | Adds support for Python 3.3 and Django 1.5 and 1.6. Huge thanks to Carl Meyer 203 | for his work. 204 | 205 | 0.9.0 206 | ----- 207 | 208 | Redis cache now allows you to use either a TCP connection or Unix domain 209 | socket to connect to your redis server. Using a TCP connection is useful for 210 | when you have your redis server separate from your app server and/or within 211 | a distributed environment. Unix domain sockets are useful if you have your 212 | redis server and application running on the same machine and want the fastest 213 | possible connection. 214 | 215 | You can now specify (optionally) what parser class you want redis-py to use 216 | when parsing messages from the redis server. redis-py will pick the best 217 | parser for you implicitly, but using the ``PARSER_CLASS`` setting gives you 218 | control and the option to roll your own parser class if you are so bold. 219 | 220 | 221 | Requirements 222 | ============ 223 | 224 | `redis-py`_ >= 2.10.3 225 | `redis`_ >= 2.4 226 | `hiredis`_ 227 | `python`_ >= 3.6 228 | 229 | 1. Run ``pip install django-redis-cache``. 230 | 231 | 2. Modify your Django settings to use ``redis_cache``. 232 | 233 | .. code:: python 234 | 235 | # When using TCP connections 236 | CACHES = { 237 | 'default': { 238 | 'BACKEND': 'redis_cache.RedisCache', 239 | 'LOCATION': [ 240 | '<host>:<port>', 241 | '<host>:<port>', 242 | '<host>:<port>', 243 | ], 244 | 'OPTIONS': { 245 | 'DB': 1, 246 | 'PASSWORD': 'yadayada', 247 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 248 | 'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool', 249 | 'CONNECTION_POOL_CLASS_KWARGS': { 250 | 'max_connections': 50, 251 | 'timeout': 20, 252 | }, 253 | 'MAX_CONNECTIONS': 1000, 254 | 'PICKLE_VERSION': -1, 255 | }, 256 | }, 257 | } 258 | 259 | # When using unix domain sockets 260 | # Note: ``LOCATION`` needs to be the same as the ``unixsocket`` setting 261 | # in your redis.conf 262 | CACHES = { 263 | 'default': { 264 | 'BACKEND': 'redis_cache.RedisCache', 265 | 'LOCATION': '/path/to/socket/file', 266 | 'OPTIONS': { 267 | 'DB': 1, 268 | 'PASSWORD': 'yadayada', 269 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 270 | 'PICKLE_VERSION': 2, 271 | }, 272 | }, 273 | } 274 | 275 | # For Master-Slave Setup, specify the host:port of the master 276 | # redis-server instance.
277 | CACHES = { 278 | 'default': { 279 | 'BACKEND': 'redis_cache.RedisCache', 280 | 'LOCATION': [ 281 | '<host>:<port>', 282 | '<host>:<port>', 283 | '<host>:<port>', 284 | ], 285 | 'OPTIONS': { 286 | 'DB': 1, 287 | 'PASSWORD': 'yadayada', 288 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 289 | 'PICKLE_VERSION': 2, 290 | 'MASTER_CACHE': '<master host>:<master port>', 291 | }, 292 | }, 293 | } 294 | 295 | 296 | 297 | Usage 298 | ===== 299 | 300 | django-redis-cache shares the same API as django's built-in cache backends, 301 | with a few exceptions. 302 | 303 | ``cache.delete_pattern`` 304 | 305 | Delete keys using glob-style pattern. 306 | 307 | example:: 308 | 309 | >>> from news.models import Story 310 | >>> 311 | >>> most_viewed = Story.objects.most_viewed() 312 | >>> highest_rated = Story.objects.highest_rated() 313 | >>> cache.set('news.stories.most_viewed', most_viewed) 314 | >>> cache.set('news.stories.highest_rated', highest_rated) 315 | >>> data = cache.get_many(['news.stories.highest_rated', 'news.stories.most_viewed']) 316 | >>> len(data) 317 | 2 318 | >>> cache.delete_pattern('news.stories.*') 319 | >>> data = cache.get_many(['news.stories.highest_rated', 'news.stories.most_viewed']) 320 | >>> len(data) 321 | 0 322 | 323 | ``cache.clear`` 324 | 325 | Same as django's ``cache.clear``, except that you can optionally specify a 326 | version and all keys with that version will be deleted. If no version is 327 | provided, all keys are flushed from the cache. 328 | 329 | ``cache.reinsert_keys`` 330 | 331 | This helper method retrieves all keys and inserts them back into the cache. This 332 | is useful when changing the pickle protocol number of all the cache entries. 333 | Prior to django-redis-cache 1.0, all cache entries were pickled using version 0. 334 | To reduce the memory footprint of the redis-server, simply run this method to 335 | upgrade cache entries to the latest protocol. 336 | 337 | 338 | Thundering Herd Protection 339 | ========================== 340 | 341 | A common problem with caching is that you can sometimes get into a situation 342 | where you have a value that takes a long time to compute or retrieve, but have 343 | clients accessing it a lot. For example, if you wanted to retrieve the latest 344 | tweets from the twitter api, you probably want to cache the response for a number 345 | of minutes so you don't exceed your rate limit. However, when the cache entry 346 | expires you can have multiple clients that see there is no entry and try to 347 | simultaneously fetch the latest results from the api. 348 | 349 | The way to get around this problem is to pass a callable and a timeout to 350 | ``get_or_set``, which will check the cache to see if the value needs to be 351 | computed. If it does, then the cache sets a placeholder that tells future clients 352 | to serve data from the stale cache until the new value is created. 353 | 354 | Example:: 355 | 356 | tweets = cache.get_or_set('tweets', twitter.get_newest, timeout=300) 357 | 358 | 359 | Running Tests 360 | ============= 361 | 362 | ``./install_redis.sh`` 363 | 364 | ``make test`` 365 | 366 | .. _redis-py: http://github.com/andymccurdy/redis-py/ 367 | .. _redis: http://github.com/antirez/redis/ 368 | .. _hiredis: http://github.com/antirez/hiredis/ 369 | ..
_python: http://python.org 370 | -------------------------------------------------------------------------------- /redis_cache/backends/base.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | 3 | from django.core.cache.backends.base import ( 4 | BaseCache, DEFAULT_TIMEOUT, InvalidCacheBackendError, 5 | ) 6 | from django.core.exceptions import ImproperlyConfigured 7 | 8 | try: 9 | import redis 10 | except ImportError: 11 | raise InvalidCacheBackendError( 12 | "Redis cache backend requires the 'redis-py' library" 13 | ) 14 | 15 | from redis.connection import DefaultParser 16 | from redis_cache.constants import KEY_EXPIRED, KEY_NON_VOLATILE 17 | from redis_cache.connection import pool 18 | from redis_cache.utils import get_servers, parse_connection_kwargs, import_class 19 | 20 | 21 | def get_client(write=False): 22 | 23 | def wrapper(method): 24 | 25 | @wraps(method) 26 | def wrapped(self, key, *args, **kwargs): 27 | version = kwargs.pop('version', None) 28 | key = self.make_key(key, version=version) 29 | client = self.get_client(key, write=write) 30 | return method(self, client, key, *args, **kwargs) 31 | 32 | return wrapped 33 | 34 | return wrapper 35 | 36 | 37 | class BaseRedisCache(BaseCache): 38 | 39 | def __init__(self, server, params): 40 | """ 41 | Connect to Redis, and set up cache backend. 42 | """ 43 | super(BaseRedisCache, self).__init__(params) 44 | self.server = server 45 | self.servers = get_servers(server) 46 | self.params = params or {} 47 | self.options = params.get('OPTIONS', {}) 48 | self.clients = {} 49 | self.client_list = [] 50 | 51 | self.db = self.get_db() 52 | self.password = self.get_password() 53 | self.parser_class = self.get_parser_class() 54 | self.pickle_version = self.get_pickle_version() 55 | self.socket_timeout = self.get_socket_timeout() 56 | self.socket_connect_timeout = self.get_socket_connect_timeout() 57 | self.connection_pool_class = self.get_connection_pool_class() 58 | self.connection_pool_class_kwargs = ( 59 | self.get_connection_pool_class_kwargs() 60 | ) 61 | 62 | # Serializer 63 | self.serializer_class = self.get_serializer_class() 64 | self.serializer_class_kwargs = self.get_serializer_class_kwargs() 65 | self.serializer = self.serializer_class( 66 | **self.serializer_class_kwargs 67 | ) 68 | 69 | # Compressor 70 | self.compressor_class = self.get_compressor_class() 71 | self.compressor_class_kwargs = self.get_compressor_class_kwargs() 72 | self.compressor = self.compressor_class( 73 | **self.compressor_class_kwargs 74 | ) 75 | 76 | redis_py_version = tuple(int(part) for part in redis.__version__.split('.')) 77 | if redis_py_version < (3, 0, 0): 78 | self.Redis = redis.StrictRedis 79 | else: 80 | self.Redis = redis.Redis 81 | 82 | def __getstate__(self): 83 | return {'params': self.params, 'server': self.server} 84 | 85 | def __setstate__(self, state): 86 | self.__init__(**state) 87 | 88 | def get_db(self): 89 | _db = self.params.get('db', self.options.get('DB', 1)) 90 | try: 91 | return int(_db) 92 | except (ValueError, TypeError): 93 | raise ImproperlyConfigured("db value must be an integer") 94 | 95 | def get_password(self): 96 | return self.params.get('password', self.options.get('PASSWORD', None)) 97 | 98 | def get_parser_class(self): 99 | parser_class = self.options.get('PARSER_CLASS', None) 100 | if parser_class is None: 101 | return DefaultParser 102 | return import_class(parser_class) 103 | 104 | def get_pickle_version(self): 105 | """ 106 | Get the pickle version 
from the settings and save it for future use 107 | """ 108 | _pickle_version = self.options.get('PICKLE_VERSION', -1) 109 | try: 110 | return int(_pickle_version) 111 | except (ValueError, TypeError): 112 | raise ImproperlyConfigured( 113 | "pickle version value must be an integer" 114 | ) 115 | 116 | def get_socket_timeout(self): 117 | return self.options.get('SOCKET_TIMEOUT', None) 118 | 119 | def get_socket_connect_timeout(self): 120 | return self.options.get('SOCKET_CONNECT_TIMEOUT', None) 121 | 122 | def get_connection_pool_class(self): 123 | pool_class = self.options.get( 124 | 'CONNECTION_POOL_CLASS', 125 | 'redis.ConnectionPool' 126 | ) 127 | return import_class(pool_class) 128 | 129 | def get_connection_pool_class_kwargs(self): 130 | return self.options.get('CONNECTION_POOL_CLASS_KWARGS', {}) 131 | 132 | def get_serializer_class(self): 133 | serializer_class = self.options.get( 134 | 'SERIALIZER_CLASS', 135 | 'redis_cache.serializers.PickleSerializer' 136 | ) 137 | return import_class(serializer_class) 138 | 139 | def get_serializer_class_kwargs(self): 140 | kwargs = self.options.get('SERIALIZER_CLASS_KWARGS', {}) 141 | serializer_class = self.options.get( 142 | 'SERIALIZER_CLASS', 143 | 'redis_cache.serializers.PickleSerializer' 144 | ) 145 | if serializer_class == 'redis_cache.serializers.PickleSerializer': 146 | kwargs['pickle_version'] = kwargs.get( 147 | 'pickle_version', 148 | self.pickle_version 149 | ) 150 | return kwargs 151 | 152 | def get_compressor_class(self): 153 | compressor_class = self.options.get( 154 | 'COMPRESSOR_CLASS', 155 | 'redis_cache.compressors.NoopCompressor' 156 | ) 157 | return import_class(compressor_class) 158 | 159 | def get_compressor_class_kwargs(self): 160 | return self.options.get('COMPRESSOR_CLASS_KWARGS', {}) 161 | 162 | def get_master_client(self): 163 | """ 164 | Get the write server:port of the master cache 165 | """ 166 | cache = self.options.get('MASTER_CACHE', None) 167 | if cache is None: 168 | return next(iter(self.client_list)) 169 | 170 | kwargs = parse_connection_kwargs(cache, db=self.db) 171 | return self.clients[( 172 | kwargs['host'], 173 | kwargs['port'], 174 | kwargs['db'], 175 | kwargs['unix_socket_path'], 176 | )] 177 | 178 | def create_client(self, server): 179 | kwargs = parse_connection_kwargs( 180 | server, 181 | db=self.db, 182 | password=self.password, 183 | socket_timeout=self.socket_timeout, 184 | socket_connect_timeout=self.socket_connect_timeout, 185 | ) 186 | 187 | # remove socket-related connection arguments 188 | if kwargs.get('ssl', False): 189 | del kwargs['socket_timeout'] 190 | del kwargs['socket_connect_timeout'] 191 | del kwargs['unix_socket_path'] 192 | 193 | client = self.Redis(**kwargs) 194 | kwargs.update( 195 | parser_class=self.parser_class, 196 | connection_pool_class=self.connection_pool_class, 197 | connection_pool_class_kwargs=self.connection_pool_class_kwargs, 198 | ) 199 | connection_pool = pool.get_connection_pool(client, **kwargs) 200 | client.connection_pool = connection_pool 201 | return client 202 | 203 | def serialize(self, value): 204 | return self.serializer.serialize(value) 205 | 206 | def deserialize(self, value): 207 | return self.serializer.deserialize(value) 208 | 209 | def compress(self, value): 210 | return self.compressor.compress(value) 211 | 212 | def decompress(self, value): 213 | return self.compressor.decompress(value) 214 | 215 | def get_value(self, original): 216 | try: 217 | value = int(original) 218 | except (ValueError, TypeError): 219 | value = self.decompress(original) 220 | value = self.deserialize(value) 221 | return value 222 |
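    # Note: prep_value (below) stores ints raw -- unserialized and
    # uncompressed -- so redis can apply INCR/DECR to them natively;
    # get_value (above) mirrors that by trying int() before falling back
    # to decompress/deserialize.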
223 | def prep_value(self, value): 224 | if isinstance(value, int) and not isinstance(value, bool): 225 | return value 226 | value = self.serialize(value) 227 | return self.compress(value) 228 | 229 | def make_keys(self, keys, version=None): 230 | return [self.make_key(key, version=version) for key in keys] 231 | 232 | def get_timeout(self, timeout): 233 | if timeout is DEFAULT_TIMEOUT: 234 | timeout = self.default_timeout 235 | 236 | if timeout is not None: 237 | timeout = int(timeout) 238 | 239 | return timeout 240 | 241 | #################### 242 | # Django cache api # 243 | #################### 244 | 245 | @get_client(write=True) 246 | def add(self, client, key, value, timeout=DEFAULT_TIMEOUT): 247 | """Add a value to the cache, failing if the key already exists. 248 | 249 | Returns ``True`` if the object was added, ``False`` if not. 250 | """ 251 | timeout = self.get_timeout(timeout) 252 | return self._set(client, key, self.prep_value(value), timeout, _add_only=True) 253 | 254 | def _get(self, client, key, default=None): 255 | value = client.get(key) 256 | if value is None: 257 | return default 258 | value = self.get_value(value) 259 | return value 260 | 261 | @get_client() 262 | def get(self, client, key, default=None): 263 | """Retrieve a value from the cache. 264 | 265 | Returns deserialized value if key is found, the default if not. 266 | """ 267 | return self._get(client, key, default) 268 | 269 | def _set(self, client, key, value, timeout, _add_only=False): 270 | if timeout is not None and timeout < 0: 271 | return False 272 | elif timeout == 0: 273 | return client.expire(key, 0) 274 | return client.set(key, value, nx=_add_only, ex=timeout) 275 | 276 | @get_client(write=True) 277 | def set(self, client, key, value, timeout=DEFAULT_TIMEOUT): 278 | """Persist a value to the cache, and set an optional expiration time. 279 | """ 280 | timeout = self.get_timeout(timeout) 281 | result = self._set(client, key, self.prep_value(value), timeout, _add_only=False) 282 | return result 283 | 284 | @get_client(write=True) 285 | def delete(self, client, key): 286 | """Remove a key from the cache.""" 287 | return client.delete(key) 288 | 289 | def _delete_many(self, client, keys): 290 | return client.delete(*keys) 291 | 292 | def delete_many(self, keys, version=None): 293 | """ 294 | Remove multiple keys at once. 295 | """ 296 | raise NotImplementedError 297 | 298 | def _clear(self, client): 299 | return client.flushdb() 300 | 301 | def clear(self, version=None): 302 | """Flush cache keys. 303 | 304 | If version is specified, all keys belonging to the version's key 305 | namespace will be deleted. Otherwise, all keys will be deleted. 306 | """ 307 | raise NotImplementedError 308 |
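    # get_many/set_many below -- like delete_many and clear above -- raise
    # NotImplementedError here and are implemented by the concrete backends
    # (redis_cache/backends/single.py and multiple.py), since unified and
    # sharded keyspaces batch keys differently.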
306 | """ 307 | raise NotImplementedError 308 | 309 | def _get_many(self, client, original_keys, versioned_keys): 310 | recovered_data = {} 311 | map_keys = dict(zip(versioned_keys, original_keys)) 312 | 313 | # Only try to mget if we actually received any keys to get 314 | if map_keys: 315 | results = client.mget(versioned_keys) 316 | 317 | for key, value in zip(versioned_keys, results): 318 | if value is None: 319 | continue 320 | recovered_data[map_keys[key]] = self.get_value(value) 321 | 322 | return recovered_data 323 | 324 | def get_many(self, keys, version=None): 325 | """Retrieve many keys.""" 326 | raise NotImplementedError 327 | 328 | def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): 329 | """Set a bunch of values in the cache at once from a dict of key/value 330 | pairs. This is much more efficient than calling set() multiple times. 331 | 332 | If timeout is given, that timeout will be used for the key; otherwise 333 | the default cache timeout will be used. 334 | """ 335 | raise NotImplementedError 336 | 337 | @get_client(write=True) 338 | def incr(self, client, key, delta=1): 339 | """Add delta to value in the cache. If the key does not exist, raise a 340 | `ValueError` exception. 341 | """ 342 | exists = client.exists(key) 343 | if not exists: 344 | raise ValueError("Key '%s' not found" % key) 345 | 346 | value = client.incr(key, delta) 347 | 348 | return value 349 | 350 | def _incr_version(self, client, old, new, original, delta, version): 351 | try: 352 | client.rename(old, new) 353 | except redis.ResponseError: 354 | raise ValueError("Key '%s' not found" % original) 355 | return version + delta 356 | 357 | def incr_version(self, key, delta=1, version=None): 358 | """Adds delta to the cache version for the supplied key. Returns the 359 | new version. 360 | """ 361 | 362 | @get_client(write=True) 363 | def touch(self, client, key, timeout=DEFAULT_TIMEOUT): 364 | """Reset the timeout of a key to `timeout` seconds.""" 365 | return client.expire(key, timeout) 366 | 367 | ##################### 368 | # Extra api methods # 369 | ##################### 370 | 371 | @get_client() 372 | def has_key(self, client, key): 373 | """Returns True if the key is in the cache and has not expired.""" 374 | return client.exists(key) 375 | 376 | @get_client() 377 | def ttl(self, client, key): 378 | """Returns the 'time-to-live' of a key. If the key is not volatile, 379 | i.e. it has not set expiration, then the value returned is None. 380 | Otherwise, the value is the number of seconds remaining. If the key 381 | does not exist, 0 is returned. 
382 | """ 383 | ttl = client.ttl(key) 384 | if ttl == KEY_NON_VOLATILE: 385 | return None 386 | elif ttl == KEY_EXPIRED: 387 | return 0 388 | else: 389 | return ttl 390 | 391 | def _delete_pattern(self, client, pattern): 392 | keys = list(client.scan_iter(match=pattern)) 393 | if keys: 394 | client.delete(*keys) 395 | 396 | def delete_pattern(self, pattern, version=None): 397 | raise NotImplementedError 398 | 399 | def lock( 400 | self, 401 | key, 402 | timeout=None, 403 | sleep=0.1, 404 | blocking_timeout=None, 405 | thread_local=True): 406 | client = self.get_client(key, write=True) 407 | return client.lock( 408 | key, 409 | timeout=timeout, 410 | sleep=sleep, 411 | blocking_timeout=blocking_timeout, 412 | thread_local=thread_local 413 | ) 414 | 415 | @get_client(write=True) 416 | def get_or_set( 417 | self, 418 | client, 419 | key, 420 | default, 421 | timeout=DEFAULT_TIMEOUT, 422 | lock_timeout=None, 423 | stale_cache_timeout=None): 424 | """Get a value from the cache or use ``default`` to set it and return it. 425 | 426 | If ``default`` is a callable, call it without arguments and store its return value in the cache instead. 427 | 428 | This implementation is slightly more advanced that Django's. It provides thundering herd 429 | protection, which prevents multiple threads/processes from calling the value-generating 430 | function too much. 431 | 432 | There are three timeouts you can specify: 433 | 434 | ``timeout``: Time in seconds that value at ``key`` is considered fresh. 435 | ``lock_timeout``: Time in seconds that the lock will stay active and prevent other threads 436 | or processes from acquiring the lock. 437 | ``stale_cache_timeout``: Time in seconds that the stale cache will remain after the key has 438 | expired. If ``None`` is specified, the stale value will remain indefinitely. 439 | 440 | """ 441 | lock_key = "__lock__" + key 442 | fresh_key = "__fresh__" + key 443 | 444 | is_fresh = self._get(client, fresh_key) 445 | value = self._get(client, key) 446 | 447 | if is_fresh: 448 | return value 449 | 450 | timeout = self.get_timeout(timeout) 451 | lock = self.lock(lock_key, timeout=lock_timeout) 452 | 453 | acquired = lock.acquire(blocking=False) 454 | 455 | if acquired: 456 | try: 457 | value = default() if callable(default) else default 458 | except Exception: 459 | raise 460 | else: 461 | key_timeout = ( 462 | None if stale_cache_timeout is None else timeout + stale_cache_timeout 463 | ) 464 | pipeline = client.pipeline() 465 | pipeline.set(key, self.prep_value(value), key_timeout) 466 | pipeline.set(fresh_key, 1, timeout) 467 | pipeline.execute() 468 | finally: 469 | lock.release() 470 | 471 | return value 472 | 473 | def _reinsert_keys(self, client): 474 | keys = list(client.scan_iter(match='*')) 475 | for key in keys: 476 | timeout = client.ttl(key) 477 | value = self.get_value(client.get(key)) 478 | if timeout is None: 479 | client.set(key, self.prep_value(value)) 480 | 481 | def reinsert_keys(self): 482 | """ 483 | Reinsert cache entries using the current pickle protocol version. 484 | """ 485 | raise NotImplementedError 486 | 487 | @get_client(write=True) 488 | def persist(self, client, key): 489 | """Remove the timeout on a key. 490 | 491 | Equivalent to setting a timeout of None in a set command. 492 | 493 | Returns True if successful and False if not. 
494 | """ 495 | return client.persist(key) 496 | -------------------------------------------------------------------------------- /tests/testapp/tests/base_tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from hashlib import sha1 5 | import os 6 | import subprocess 7 | import threading 8 | import time 9 | 10 | 11 | try: 12 | import cPickle as pickle 13 | except ImportError: 14 | import pickle 15 | 16 | import django 17 | from django.core.cache import caches 18 | from django.core.exceptions import ImproperlyConfigured 19 | from django.test import TestCase, override_settings 20 | from django.utils.encoding import force_bytes 21 | 22 | import redis 23 | 24 | from tests.testapp.models import Poll, expensive_calculation 25 | from redis_cache.cache import RedisCache, pool 26 | from redis_cache.constants import KEY_EXPIRED, KEY_NON_VOLATILE 27 | from redis_cache.utils import get_servers, parse_connection_kwargs 28 | 29 | 30 | REDIS_PASSWORD = 'yadayada' 31 | 32 | 33 | LOCATION = "127.0.0.1:6381" 34 | 35 | 36 | # functions/classes for complex data type tests 37 | def f(): 38 | return 42 39 | 40 | 41 | class C: 42 | def m(n): 43 | return 24 44 | 45 | 46 | def start_redis_servers(servers, db=None, master=None): 47 | """Creates redis instances using specified locations from the settings. 48 | 49 | Returns list of Popen objects 50 | """ 51 | processes = [] 52 | devnull = open(os.devnull, 'w') 53 | master_connection_kwargs = master and parse_connection_kwargs( 54 | master, 55 | db=db, 56 | password=REDIS_PASSWORD 57 | ) 58 | for i, server in enumerate(servers): 59 | connection_kwargs = parse_connection_kwargs( 60 | server, 61 | db=db, 62 | password=REDIS_PASSWORD, # will be overridden if specified in `server` 63 | ) 64 | parameters = dict( 65 | port=connection_kwargs.get('port', 0), 66 | requirepass=connection_kwargs['password'], 67 | ) 68 | is_socket = server.startswith('unix://') or server.startswith('/') 69 | if is_socket: 70 | parameters.update( 71 | port=0, 72 | unixsocket='/tmp/redis{0}.sock'.format(i), 73 | unixsocketperm=755, 74 | ) 75 | if master and not connection_kwargs == master_connection_kwargs: 76 | parameters.update( 77 | masterauth=master_connection_kwargs['password'], 78 | slaveof="{host} {port}".format( 79 | host=master_connection_kwargs['host'], 80 | port=master_connection_kwargs['port'], 81 | ) 82 | ) 83 | 84 | args = ['./redis/src/redis-server'] + [ 85 | "--{parameter} {value}".format(parameter=parameter, value=value) 86 | for parameter, value in parameters.items() 87 | ] 88 | p = subprocess.Popen(args, stdout=devnull) 89 | processes.append(p) 90 | 91 | return processes 92 | 93 | 94 | class SetupMixin(object): 95 | processes = None 96 | 97 | @classmethod 98 | def tearDownClass(cls): 99 | for p in cls.processes: 100 | p.kill() 101 | cls.processes = None 102 | 103 | # Give redis processes some time to shutdown 104 | # time.sleep(.1) 105 | 106 | def setUp(self): 107 | if self.__class__.processes is None: 108 | from django.conf import settings 109 | 110 | cache_settings = settings.CACHES['default'] 111 | servers = get_servers(cache_settings['LOCATION']) 112 | options = cache_settings.get('OPTIONS', {}) 113 | db = options.get('db', 0) 114 | master = options.get('MASTER_CACHE') 115 | self.__class__.processes = start_redis_servers( 116 | servers, 117 | db=db, 118 | master=master 119 | ) 120 | 121 | # Give redis processes some time to startup 122 | time.sleep(.1) 123 | 
/tests/testapp/tests/base_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import unicode_literals
3 | 
4 | from hashlib import sha1
5 | import os
6 | import subprocess
7 | import threading
8 | import time
9 | 
10 | 
11 | try:
12 |     import cPickle as pickle
13 | except ImportError:
14 |     import pickle
15 | 
16 | import django
17 | from django.core.cache import caches
18 | from django.core.exceptions import ImproperlyConfigured
19 | from django.test import TestCase, override_settings
20 | from django.utils.encoding import force_bytes
21 | 
22 | import redis
23 | 
24 | from tests.testapp.models import Poll, expensive_calculation
25 | from redis_cache.cache import RedisCache, pool
26 | from redis_cache.constants import KEY_EXPIRED, KEY_NON_VOLATILE
27 | from redis_cache.utils import get_servers, parse_connection_kwargs
28 | 
29 | 
30 | REDIS_PASSWORD = 'yadayada'
31 | 
32 | 
33 | LOCATION = "127.0.0.1:6381"
34 | 
35 | 
36 | # functions/classes for complex data type tests
37 | def f():
38 |     return 42
39 | 
40 | 
41 | class C:
42 |     def m(n):
43 |         return 24
44 | 
45 | 
46 | def start_redis_servers(servers, db=None, master=None):
47 |     """Create redis instances at the locations specified in the settings.
48 | 
49 |     Returns a list of Popen objects.
50 |     """
51 |     processes = []
52 |     devnull = open(os.devnull, 'w')
53 |     master_connection_kwargs = master and parse_connection_kwargs(
54 |         master,
55 |         db=db,
56 |         password=REDIS_PASSWORD
57 |     )
58 |     for i, server in enumerate(servers):
59 |         connection_kwargs = parse_connection_kwargs(
60 |             server,
61 |             db=db,
62 |             password=REDIS_PASSWORD,  # will be overridden if specified in `server`
63 |         )
64 |         parameters = dict(
65 |             port=connection_kwargs.get('port', 0),
66 |             requirepass=connection_kwargs['password'],
67 |         )
68 |         is_socket = server.startswith('unix://') or server.startswith('/')
69 |         if is_socket:
70 |             parameters.update(
71 |                 port=0,
72 |                 unixsocket='/tmp/redis{0}.sock'.format(i),
73 |                 unixsocketperm=755,
74 |             )
75 |         if master and not connection_kwargs == master_connection_kwargs:
76 |             parameters.update(
77 |                 masterauth=master_connection_kwargs['password'],
78 |                 slaveof="{host} {port}".format(
79 |                     host=master_connection_kwargs['host'],
80 |                     port=master_connection_kwargs['port'],
81 |                 )
82 |             )
83 | 
84 |         args = ['./redis/src/redis-server'] + [
85 |             "--{parameter} {value}".format(parameter=parameter, value=value)
86 |             for parameter, value in parameters.items()
87 |         ]
88 |         p = subprocess.Popen(args, stdout=devnull)
89 |         processes.append(p)
90 | 
91 |     return processes
92 | 
93 | 
94 | class SetupMixin(object):
95 |     processes = None
96 | 
97 |     @classmethod
98 |     def tearDownClass(cls):
99 |         for p in cls.processes:
100 |             p.kill()
101 |         cls.processes = None
102 | 
103 |         # Give redis processes some time to shutdown
104 |         # time.sleep(.1)
105 | 
106 |     def setUp(self):
107 |         if self.__class__.processes is None:
108 |             from django.conf import settings
109 | 
110 |             cache_settings = settings.CACHES['default']
111 |             servers = get_servers(cache_settings['LOCATION'])
112 |             options = cache_settings.get('OPTIONS', {})
113 |             db = options.get('db', 0)
114 |             master = options.get('MASTER_CACHE')
115 |             self.__class__.processes = start_redis_servers(
116 |                 servers,
117 |                 db=db,
118 |                 master=master
119 |             )
120 | 
121 |             # Give redis processes some time to startup
122 |             time.sleep(.1)
123 | 
124 |         self.reset_pool()
125 |         self.cache = self.get_cache()
126 | 
127 |     def tearDown(self):
128 |         # clear caches to allow @override_settings(CACHES=...) to work.
129 |         if django.VERSION < (3, 2):
130 |             caches._caches.caches = {}
131 |         else:
132 |             for alias in caches:
133 |                 if hasattr(caches._connections, alias):
134 |                     del caches[alias]
135 |         # Sometimes it will be necessary to skip this method because we need to
136 |         # test default initialization and that may be using a different port
137 |         # than the test redis server.
138 |         if hasattr(self, '_skip_tearDown') and self._skip_tearDown:
139 |             self._skip_tearDown = False
140 |             return
141 |         self.cache.clear()
142 | 
143 |     def reset_pool(self):
144 |         pool.reset()
145 | 
146 |     def get_cache(self, backend=None):
147 |         return caches[backend or 'default']
148 | 
149 | 
150 | class BaseRedisTestCase(SetupMixin):
151 | 
152 |     def test_simple(self):
153 |         # Simple cache set/get works
154 |         self.cache.set("key", "value")
155 |         self.assertEqual(self.cache.get("key"), "value")
156 | 
157 |     def test_add(self):
158 |         # A key can be added to a cache
159 |         self.cache.add("addkey1", "value")
160 |         result = self.cache.add("addkey1", "newvalue")
161 |         self.assertFalse(result)
162 |         self.assertEqual(self.cache.get("addkey1"), "value")
163 | 
164 |     def test_non_existent(self):
165 |         # Non-existent cache keys return as None/default
166 |         # get with non-existent keys
167 |         self.assertIsNone(self.cache.get("does_not_exist"))
168 |         self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")
169 | 
170 |     def test_get_many(self):
171 |         # Multiple cache keys can be returned using get_many
172 |         self.cache.set('a', 'a')
173 |         self.cache.set('b', 'b')
174 |         self.cache.set('c', 'c')
175 |         self.cache.set('d', 'd')
176 |         self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'})
177 |         self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'})
178 | 
179 |     def test_get_many_works_with_empty_keys_array(self):
180 |         self.assertEqual(self.cache.get_many([]), {})
181 | 
182 |     def test_get_many_with_manual_integer_insertion(self):
183 |         keys = ['a', 'b', 'c', 'd']
184 |         for i, key in enumerate(keys):
185 |             self.cache.set(key, i)
186 |         self.assertEqual(self.cache.get_many(keys), {'a': 0, 'b': 1, 'c': 2, 'd': 3})
187 | 
188 |     def test_get_many_with_automatic_integer_insertion(self):
189 |         keys = ['a', 'b', 'c', 'd']
190 |         for i, key in enumerate(keys):
191 |             self.cache.set(key, i)
192 |         self.assertEqual(self.cache.get_many(keys), {'a': 0, 'b': 1, 'c': 2, 'd': 3})
193 | 
194 |     def test_delete(self):
195 |         # Cache keys can be deleted
196 |         self.cache.set("key1", "spam")
197 |         self.cache.set("key2", "eggs")
198 |         self.assertEqual(self.cache.get("key1"), "spam")
199 |         self.cache.delete("key1")
200 |         self.assertIsNone(self.cache.get("key1"))
201 |         self.assertEqual(self.cache.get("key2"), "eggs")
202 | 
203 |     def test_has_key(self):
204 |         # The cache can be inspected for cache keys
205 |         self.cache.set("hello1", "goodbye1")
206 |         self.assertIn("hello1", self.cache)
207 |         self.assertNotIn("goodbye1", self.cache)
208 | 
209 |     def test_in(self):
210 |         # The in operator can be used to inspect cache contents
211 |         self.cache.set("hello2", "goodbye2")
212 |         self.assertIn("hello2", self.cache)
213 |         self.assertNotIn("goodbye2", self.cache)
214 | 
215 |     def test_incr(self):
216 |         # Cache values can be incremented
217 |         self.cache.set('answer', 41)
218 |         self.assertEqual(self.cache.get('answer'), 41)
219 |         self.assertEqual(self.cache.incr('answer'), 42)
220 |         self.assertEqual(self.cache.get('answer'), 42)
221 |         self.assertEqual(self.cache.incr('answer', 10), 52)
222 |         self.assertEqual(self.cache.get('answer'), 52)
223 |         self.assertRaises(ValueError, self.cache.incr, 'does_not_exist')
224 | 
225 |     def test_decr(self):
226 |         # Cache values can be decremented
227 |         self.cache.set('answer', 43)
228 |         self.assertEqual(self.cache.decr('answer'), 42)
229 |         self.assertEqual(self.cache.get('answer'), 42)
230 |         self.assertEqual(self.cache.decr('answer', 10), 32)
231 |         self.assertEqual(self.cache.get('answer'), 32)
232 |         self.assertRaises(ValueError, self.cache.decr, 'does_not_exist')
233 | 
234 |     def test_data_types(self):
235 |         # Many different data types can be cached
236 |         stuff = {
237 |             'string': 'this is a string',
238 |             'int': 42,
239 |             'list': [1, 2, 3, 4],
240 |             'tuple': (1, 2, 3, 4),
241 |             'dict': {'A': 1, 'B': 2},
242 |             'function': f,
243 |             'class': C,
244 |         }
245 |         self.cache.set("stuff", stuff)
246 |         self.assertEqual(self.cache.get("stuff"), stuff)
247 | 
248 |     def test_cache_read_for_model_instance(self):
249 |         # Don't want fields with callable as default to be called on cache read
250 |         expensive_calculation.num_runs = 0
251 |         Poll.objects.all().delete()
252 |         my_poll = Poll.objects.create(question="Well?")
253 |         self.assertEqual(Poll.objects.count(), 1)
254 |         pub_date = my_poll.pub_date
255 |         self.cache.set('question', my_poll)
256 |         cached_poll = self.cache.get('question')
257 |         self.assertEqual(cached_poll.pub_date, pub_date)
258 |         # We only want the default expensive calculation run once
259 |         self.assertEqual(expensive_calculation.num_runs, 1)
260 | 
261 |     def test_cache_write_for_model_instance_with_deferred(self):
262 |         # Don't want fields with callable as default to be called on cache write
263 |         expensive_calculation.num_runs = 0
264 |         Poll.objects.all().delete()
265 |         Poll.objects.create(question="What?")
266 |         self.assertEqual(expensive_calculation.num_runs, 1)
267 |         defer_qs = Poll.objects.all().defer('question')
268 |         self.assertEqual(defer_qs.count(), 1)
269 |         self.assertEqual(expensive_calculation.num_runs, 1)
270 |         self.cache.set('deferred_queryset', defer_qs)
271 |         # cache set should not re-evaluate default functions
272 |         self.assertEqual(expensive_calculation.num_runs, 1)
273 | 
274 |     def test_cache_read_for_model_instance_with_deferred(self):
275 |         # Don't want fields with callable as default to be called on cache read
276 |         expensive_calculation.num_runs = 0
277 |         Poll.objects.all().delete()
278 |         Poll.objects.create(question="What?")
279 |         self.assertEqual(expensive_calculation.num_runs, 1)
280 |         defer_qs = Poll.objects.all().defer('question')
281 |         self.assertEqual(defer_qs.count(), 1)
282 |         self.cache.set('deferred_queryset', defer_qs)
283 |         self.assertEqual(expensive_calculation.num_runs, 1)
284 |         runs_before_cache_read = expensive_calculation.num_runs
285 |         self.cache.get('deferred_queryset')
286 |         # We only want the default expensive calculation run on creation and set
287 |         self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
288 | 
289 |     def test_expiration(self):
290 |         # Cache values can be set to expire
291 |         self.cache.set('expire1', 'very quickly', 1)
292 |         self.cache.set('expire2', 'very quickly', 1)
293 |         self.cache.set('expire3', 'very quickly', 1)
294 | 
295 |         time.sleep(2)
296 |         self.assertEqual(self.cache.get("expire1"), None)
297 | 
298 |         self.cache.add("expire2", "newvalue")
299 |         self.assertEqual(self.cache.get("expire2"), "newvalue")
300 |         self.assertEqual("expire3" in self.cache, False)
301 | 
302 |     def test_set_expiration_timeout_None(self):
303 |         key, value = 'key', 'value'
304 |         self.cache.set(key, value, timeout=None)
305 |         self.assertIsNone(self.cache.ttl(key))
306 | 
307 |     def test_set_expiration_timeout_zero(self):
308 |         key, value = self.cache.make_key('key'), 'value'
309 |         self.cache.set(key, value, timeout=0)
310 |         self.assertEqual(self.cache.get_client(key).ttl(key), KEY_EXPIRED)
311 |         self.assertNotIn(key, self.cache)
312 | 
313 |     def test_set_expiration_timeout_negative(self):
314 |         key, value = self.cache.make_key('key'), 'value'
315 |         self.cache.set(key, value, timeout=-1)
316 |         self.assertNotIn(key, self.cache)
317 | 
318 |     def test_unicode(self):
319 |         # Unicode values can be cached
320 |         stuff = {
321 |             'ascii': 'ascii_value',
322 |             'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
323 |             'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
324 |             'ascii2': {'x': 1}
325 |         }
326 |         for (key, value) in stuff.items():
327 |             self.cache.set(key, value)
328 |             self.assertEqual(self.cache.get(key), value)
329 | 
330 |     def test_binary_string(self):
331 |         # Binary strings should be cacheable
332 |         from zlib import compress, decompress
333 |         value = b'value_to_be_compressed'
334 |         compressed_value = compress(value)
335 |         self.cache.set('binary1', compressed_value)
336 |         compressed_result = self.cache.get('binary1')
337 |         self.assertEqual(compressed_value, compressed_result)
338 |         self.assertEqual(value, decompress(compressed_result))
339 | 
340 |     def test_set_many(self):
341 |         # Multiple keys can be set using set_many
342 |         self.cache.set_many({"key1": "spam", "key2": "eggs"})
343 |         self.assertEqual(self.cache.get("key1"), "spam")
344 |         self.assertEqual(self.cache.get("key2"), "eggs")
345 | 
346 |     def test_set_many_works_with_empty_dict(self):
347 |         # This test passes if no exception is raised
348 |         self.cache.set_many({})
349 |         self.cache.set_many({}, version=2)
350 | 
351 |     def test_set_many_expiration(self):
352 |         # set_many takes a second ``timeout`` parameter
353 |         self.cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
354 |         time.sleep(2)
355 |         self.assertIsNone(self.cache.get("key1"))
356 |         self.assertIsNone(self.cache.get("key2"))
357 | 
358 |     def test_set_many_version(self):
359 |         self.cache.set_many({"key1": "spam", "key2": "eggs"}, version=2)
360 |         self.assertEqual(self.cache.get("key1", version=2), "spam")
361 |         self.assertEqual(self.cache.get("key2", version=2), "eggs")
362 | 
363 |     def test_delete_many(self):
364 |         # Multiple keys can be deleted using delete_many
365 |         self.cache.set("key1", "spam")
366 |         self.cache.set("key2", "eggs")
367 |         self.cache.set("key3", "ham")
368 |         self.cache.delete_many(["key1", "key2"])
369 |         self.assertIsNone(self.cache.get("key1"))
370 |         self.assertIsNone(self.cache.get("key2"))
371 |         self.assertEqual(self.cache.get("key3"), "ham")
372 |         # Test that passing an empty list fails silently
373 |         self.cache.delete_many([])
374 | 
375 |     def test_clear(self):
376 |         # The cache can be emptied using clear
377 |         self.cache.set("key1", "spam")
378 |         self.cache.set("key2", "eggs")
379 |         self.cache.clear()
380 |         self.assertIsNone(self.cache.get("key1"))
381 |         self.assertIsNone(self.cache.get("key2"))
382 | 
383 |     def test_long_timeout(self):
384 |         """Using a timeout greater than 30 days makes memcached think
385 |         it is an absolute expiration timestamp instead of a relative
386 |         offset. Test that we honour this convention. Refs #12399.
387 | """ 388 | self.cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second 389 | self.assertEqual(self.cache.get('key1'), 'eggs') 390 | 391 | self.cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) 392 | self.assertEqual(self.cache.get('key2'), 'ham') 393 | 394 | self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) 395 | self.assertEqual(self.cache.get('key3'), 'sausage') 396 | self.assertEqual(self.cache.get('key4'), 'lobster bisque') 397 | 398 | def test_incr_version(self): 399 | if isinstance(self.cache, RedisCache): 400 | key = "key1" 401 | self.cache.set(key, "spam", version=1) 402 | self.assertEqual(self.cache.make_key(key), ':1:key1') 403 | new_version = self.cache.incr_version(key, 1) 404 | self.assertEqual(new_version, 2) 405 | new_key = self.cache.make_key(key, version=new_version) 406 | self.assertEqual(new_key, ':2:key1') 407 | self.assertIsNone(self.cache.get(key, version=1)) 408 | self.assertEqual(self.cache.get(key, version=2), 'spam') 409 | 410 | def test_pickling_cache_object(self): 411 | p = pickle.dumps(self.cache) 412 | cache = pickle.loads(p) 413 | # Now let's do a simple operation using the unpickled cache object 414 | cache.add("addkey1", "value") 415 | result = cache.add("addkey1", "newvalue") 416 | self.assertFalse(result) 417 | self.assertEqual(cache.get("addkey1"), "value") 418 | 419 | def test_float_caching(self): 420 | self.cache.set('a', 1.1) 421 | a = self.cache.get('a') 422 | self.assertEqual(a, 1.1) 423 | 424 | def test_string_float_caching(self): 425 | self.cache.set('a', '1.1') 426 | a = self.cache.get('a') 427 | self.assertEqual(a, '1.1') 428 | 429 | def test_setting_string_integer_retrieves_string(self): 430 | self.assertTrue(self.cache.set("foo", "1")) 431 | self.assertEqual(self.cache.get("foo"), "1") 432 | 433 | def test_setting_bool_retrieves_bool(self): 434 | self.assertTrue(self.cache.set("bool_t", True)) 435 | self.assertTrue(self.cache.get("bool_t")) 436 | self.assertTrue(self.cache.set("bool_f", False)) 437 | self.assertFalse(self.cache.get("bool_f")) 438 | 439 | def test_delete_pattern(self): 440 | data = { 441 | 'a': 'a', 442 | 'b': 'b', 443 | 'aa': 'aa', 444 | 'bb': 'bb', 445 | 'aaa': 'aaa', 446 | 'bbb': 'bbb', 447 | } 448 | self.cache.set_many(data) 449 | self.cache.delete_pattern('aa*') 450 | items = self.cache.get_many(data.keys()) 451 | self.assertEqual(len(items), 4) 452 | 453 | self.cache.delete_pattern('b?b') 454 | items = self.cache.get_many(data.keys()) 455 | self.assertEqual(len(items), 3) 456 | 457 | def test_clearing_using_version(self): 458 | self.cache.set('a', 'a', version=1) 459 | self.cache.set('b', 'b', version=1) 460 | self.cache.set('a', 'a', version=2) 461 | self.cache.set('b', 'b', version=2) 462 | 463 | values = self.cache.get_many(['a', 'b'], version=1) 464 | self.assertEqual(len(values), 2) 465 | 466 | values = self.cache.get_many(['a', 'b'], version=2) 467 | self.assertEqual(len(values), 2) 468 | 469 | self.cache.clear(version=2) 470 | 471 | values = self.cache.get_many(['a', 'b'], version=1) 472 | self.assertEqual(len(values), 2) 473 | 474 | values = self.cache.get_many(['a', 'b'], version=2) 475 | self.assertEqual(len(values), 0) 476 | 477 | def test_reinsert_keys(self): 478 | self.cache._pickle_version = 0 479 | for i in range(2000): 480 | s = sha1(force_bytes(i)).hexdigest() 481 | self.cache.set(s, self.cache) 482 | self.cache._pickle_version = -1 483 | self.cache.reinsert_keys() 484 | 485 | def test_ttl_of_reinsert_keys(self): 486 | self.cache.set('a', 'a', 5) 
487 |         self.assertEqual(self.cache.get('a'), 'a')
488 |         self.cache.set('b', 'b', 5)
489 |         self.cache.reinsert_keys()
490 |         self.assertEqual(self.cache.get('a'), 'a')
491 |         self.assertGreater(self.cache.ttl('a'), 1)
492 |         self.assertEqual(self.cache.get('b'), 'b')
493 |         self.assertGreater(self.cache.ttl('b'), 1)
494 | 
495 |     def test_get_or_set_with_callable(self):
496 | 
497 |         def expensive_function():
498 |             expensive_function.num_calls += 1
499 |             return 42
500 | 
501 |         expensive_function.num_calls = 0
502 |         self.assertEqual(expensive_function.num_calls, 0)
503 |         value = self.cache.get_or_set('a', expensive_function, 1)
504 |         self.assertEqual(expensive_function.num_calls, 1)
505 |         self.assertEqual(value, 42)
506 | 
507 |         value = self.cache.get_or_set('a', expensive_function, 1)
508 |         self.assertEqual(expensive_function.num_calls, 1)
509 |         self.assertEqual(value, 42)
510 | 
511 |         value = self.cache.get_or_set('a', expensive_function, 1)
512 |         self.assertEqual(expensive_function.num_calls, 1)
513 |         self.assertEqual(value, 42)
514 | 
515 |         time.sleep(2)
516 |         value = self.cache.get_or_set('a', expensive_function, 1)
517 |         self.assertEqual(expensive_function.num_calls, 2)
518 |         self.assertEqual(value, 42)
519 | 
520 |     def test_get_or_set_with_value(self):
521 |         self.assertEqual(self.cache.get_or_set('a', 42, 1), 42)
522 |         self.assertEqual(self.cache.get_or_set('a', 43, 1), 42)
523 |         self.assertEqual(self.cache.get_or_set('a', 44, 1), 42)
524 |         time.sleep(2)
525 |         self.assertEqual(self.cache.get_or_set('a', 45, 1), 45)
526 |         self.assertEqual(self.cache.get_or_set('a', 46, 1), 45)
527 |         self.assertEqual(self.cache.get_or_set('a', 47, 1), 45)
528 | 
529 |     def test_get_or_set_serving_from_stale_value(self):
530 | 
531 |         def expensive_function(x):
532 |             time.sleep(.5)
533 |             expensive_function.num_calls += 1
534 |             return x
535 | 
536 |         expensive_function.num_calls = 0
537 |         self.assertEqual(expensive_function.num_calls, 0)
538 |         results = {}
539 | 
540 |         def thread_worker(thread_id, return_value, timeout, lock_timeout, stale_cache_timeout):
541 |             value = self.cache.get_or_set(
542 |                 'key',
543 |                 lambda: expensive_function(return_value),
544 |                 timeout,
545 |                 lock_timeout,
546 |                 stale_cache_timeout
547 |             )
548 |             results[thread_id] = value
549 | 
550 |         thread_0 = threading.Thread(target=thread_worker, args=(0, 'a', 1, None, 1))
551 |         thread_1 = threading.Thread(target=thread_worker, args=(1, 'b', 1, None, 1))
552 |         thread_2 = threading.Thread(target=thread_worker, args=(2, 'c', 1, None, 1))
553 |         thread_3 = threading.Thread(target=thread_worker, args=(3, 'd', 1, None, 1))
554 |         thread_4 = threading.Thread(target=thread_worker, args=(4, 'e', 1, None, 1))
555 | 
556 |         # First thread should complete and return its value
557 |         thread_0.start()  # t = 0, valid from t = .5 - 1.5, stale from t = 1.5 - 2.5
558 | 
559 |         # Second thread will start while the first thread is still working and return None.
560 |         time.sleep(.25)  # t = .25
561 |         thread_1.start()
562 |         # Third thread will start after the first value is computed, but before it expires,
563 |         # and will be served the first thread's cached value.
564 |         time.sleep(.5)  # t = .75
565 |         thread_2.start()
566 |         # Fourth thread will start after the first value has expired and will re-compute its value.
567 |         # valid from t = 2.25 - 3.25, stale from t = 3.25 - 4.25.
568 |         time.sleep(1)  # t = 1.75
569 |         thread_3.start()
570 |         # Fifth thread will start after the fourth thread has started to compute its value, but
571 |         # before the first thread's stale cache has expired.
572 |         time.sleep(.25)  # t = 2
573 |         thread_4.start()
574 | 
575 |         thread_0.join()
576 |         thread_1.join()
577 |         thread_2.join()
578 |         thread_3.join()
579 |         thread_4.join()
580 | 
581 |         self.assertEqual(results, {
582 |             0: 'a',
583 |             1: None,
584 |             2: 'a',
585 |             3: 'd',
586 |             4: 'a'
587 |         })
588 | 
589 |     def assertMaxConnection(self, cache, max_num):
590 |         for client in cache.clients.values():
591 |             self.assertLessEqual(client.connection_pool._created_connections, max_num)
592 | 
593 |     def test_max_connections(self):
594 |         pool._connection_pools = {}
595 |         cache = caches['default']
596 | 
597 |         def noop(*args, **kwargs):
598 |             pass
599 | 
600 |         releases = {}
601 |         for client in cache.clients.values():
602 |             releases[client.connection_pool] = client.connection_pool.release
603 |             client.connection_pool.release = noop
604 |             self.assertEqual(client.connection_pool.max_connections, 2)
605 | 
606 |         cache.set('a', 'a')
607 |         self.assertMaxConnection(cache, 1)
608 | 
609 |         cache.set('a', 'a')
610 |         self.assertMaxConnection(cache, 2)
611 | 
612 |         with self.assertRaises(redis.ConnectionError):
613 |             cache.set('a', 'a')
614 | 
615 |         self.assertMaxConnection(cache, 2)
616 | 
617 |         for client in cache.clients.values():
618 |             client.connection_pool.release = releases[client.connection_pool]
619 |             client.connection_pool.max_connections = 2 ** 31
620 | 
621 |     def test_has_key_with_no_key(self):
622 |         self.assertFalse(self.cache.has_key('does_not_exist'))
623 | 
624 |     def test_has_key_with_key(self):
625 |         self.cache.set('a', 'a')
626 |         self.assertTrue(self.cache.has_key('a'))
627 | 
628 |     def test_ttl_set_expiry(self):
629 |         self.cache.set('a', 'a', 10)
630 |         ttl = self.cache.ttl('a')
631 |         self.assertAlmostEqual(ttl, 10)
632 | 
633 |     def test_ttl_no_expiry(self):
634 |         self.cache.set('a', 'a', timeout=None)
635 |         ttl = self.cache.ttl('a')
636 |         self.assertIsNone(ttl)
637 | 
638 |     def test_ttl_past_expiry(self):
639 |         self.cache.set('a', 'a', timeout=1)
640 |         ttl = self.cache.ttl('a')
641 |         self.assertAlmostEqual(ttl, 1)
642 | 
643 |         time.sleep(1.1)
644 | 
645 |         ttl = self.cache.ttl('a')
646 |         self.assertEqual(ttl, 0)
647 | 
648 |     def test_non_existent_key(self):
649 |         """Non-existent keys are semantically the same as keys that have
650 |         expired.
651 | """ 652 | ttl = self.cache.ttl('does_not_exist') 653 | self.assertEqual(ttl, 0) 654 | 655 | def test_persist_expire_to_persist(self): 656 | self.cache.set('a', 'a', timeout=10) 657 | self.cache.persist('a') 658 | self.assertIsNone(self.cache.ttl('a')) 659 | 660 | def test_touch_no_expiry_to_expire(self): 661 | self.cache.set('a', 'a', timeout=None) 662 | self.cache.touch('a', 10) 663 | ttl = self.cache.ttl('a') 664 | self.assertAlmostEqual(ttl, 10) 665 | 666 | def test_touch_less(self): 667 | self.cache.set('a', 'a', timeout=20) 668 | self.cache.touch('a', 10) 669 | ttl = self.cache.ttl('a') 670 | self.assertAlmostEqual(ttl, 10) 671 | 672 | def test_touch_more(self): 673 | self.cache.set('a', 'a', timeout=10) 674 | self.cache.touch('a', 20) 675 | ttl = self.cache.ttl('a') 676 | self.assertAlmostEqual(ttl, 20) 677 | 678 | 679 | class ConfigurationTestCase(SetupMixin, TestCase): 680 | 681 | @override_settings( 682 | CACHES={ 683 | 'default': { 684 | 'BACKEND': 'redis_cache.RedisCache', 685 | 'LOCATION': LOCATION, 686 | 'OPTIONS': { 687 | 'DB': 15, 688 | 'PASSWORD': 'yadayada', 689 | 'PARSER_CLASS': 'path.to.unknown.class', 690 | 'PICKLE_VERSION': 2, 691 | 'CONNECTION_POOL_CLASS': 'redis.ConnectionPool', 692 | 'CONNECTION_POOL_CLASS_KWARGS': { 693 | 'max_connections': 2, 694 | } 695 | }, 696 | }, 697 | } 698 | ) 699 | def test_bad_parser_import(self): 700 | with self.assertRaises(ImproperlyConfigured): 701 | caches['default'] 702 | 703 | 704 | @override_settings(CACHES={ 705 | 'default': { 706 | 'BACKEND': 'redis_cache.RedisCache', 707 | 'LOCATION': [ 708 | 'redis://:yadayada@localhost:6381/15', 709 | 'redis://:yadayada@localhost:6382/15', 710 | 'redis://:yadayada@localhost:6383/15', 711 | ], 712 | 'OPTIONS': { 713 | 'DB': 1, 714 | 'PASSWORD': 'yadayada', 715 | 'PARSER_CLASS': 'redis.connection.HiredisParser', 716 | 'PICKLE_VERSION': -1, 717 | 'MASTER_CACHE': 'redis://:yadayada@localhost:6381/15', 718 | }, 719 | }, 720 | }) 721 | class RedisUrlRegressionTests(SetupMixin, TestCase): 722 | 723 | def test_unix_path_error_using_redis_url(self): 724 | pass 725 | --------------------------------------------------------------------------------