├── tests ├── __init__.py ├── settings │ ├── __init__.py │ ├── sqlite_herd.py │ ├── sqlite_usock.py │ ├── sqlite_sharding.py │ ├── sqlite.py │ ├── sqlite_sentinel.py │ ├── sqlite_lz4.py │ ├── sqlite_gzip.py │ ├── sqlite_json.py │ ├── sqlite_zlib.py │ ├── sqlite_zstd.py │ ├── sqlite_msgpack.py │ └── sqlite_sentinel_opts.py ├── README.rst ├── test_connection_string.py ├── test_hashring.py ├── test_serializers.py ├── settings_wrapper.py ├── test_connection_factory.py ├── conftest.py ├── test_cache_options.py ├── test_client.py ├── test_backend_sorted_sets.py └── test_session.py ├── changelog.d ├── .gitkeep ├── 781.misc ├── 779.misc ├── 782.misc └── 797.feature ├── django_redis ├── compressors │ ├── __init__.py │ ├── identity.py │ ├── base.py │ ├── gzip.py │ ├── zstd.py │ ├── zlib.py │ ├── lzma.py │ └── lz4.py ├── serializers │ ├── __init__.py │ ├── base.py │ ├── msgpack.py │ ├── json.py │ └── pickle.py ├── client │ ├── mixins │ │ ├── __init__.py │ │ ├── protocols.py │ │ └── sorted_sets.py │ ├── __init__.py │ ├── sentinel.py │ ├── herd.py │ └── sharded.py ├── util.py ├── exceptions.py ├── __init__.py ├── hash_ring.py ├── pool.py └── cache.py ├── setup.py ├── .github ├── ISSUE_TEMPLATE │ ├── question.md │ ├── feature_request.md │ └── bug_report.md ├── codecov.yml ├── dependabot.yml └── workflows │ ├── release.yml │ └── ci.yml ├── MANIFEST.in ├── docker ├── sentinel.conf └── docker-compose.yml ├── CONTRIBUTING.rst ├── .gitignore ├── .pre-commit-config.yaml ├── pyproject.toml ├── RELEASING.rst ├── AUTHORS.rst ├── LICENSE ├── CODE_OF_CONDUCT.md ├── setup.cfg ├── .ruff.toml ├── CHANGELOG.rst └── README.rst /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /changelog.d/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/tests/settings/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /django_redis/compressors/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /django_redis/serializers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /changelog.d/781.misc: -------------------------------------------------------------------------------- 1 | Clean & organize ruff config 2 | -------------------------------------------------------------------------------- /changelog.d/779.misc: -------------------------------------------------------------------------------- 1 | Drop support for Django 5.0 (EOL) 2 | -------------------------------------------------------------------------------- /changelog.d/782.misc: -------------------------------------------------------------------------------- 1 | Replace black with ruff format 2 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup() 4 | -------------------------------------------------------------------------------- /changelog.d/797.feature: -------------------------------------------------------------------------------- 1 | Add sorted set operations (zadd, zrange, zrem, etc.) 
and mixins for RedisCache 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Create question to us 4 | title: '' 5 | labels: question 6 | assignees: '' 7 | 8 | --- -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include AUTHORS.rst 3 | include README.rst 4 | include CHANGES.txt 5 | recursive-include tests README.txt *.py 6 | recursive-include doc Makefile *.adoc *.html 7 | -------------------------------------------------------------------------------- /docker/sentinel.conf: -------------------------------------------------------------------------------- 1 | sentinel monitor default_service 127.0.0.1 6379 1 2 | sentinel down-after-milliseconds default_service 3200 3 | sentinel failover-timeout default_service 10000 4 | sentinel parallel-syncs default_service 1 -------------------------------------------------------------------------------- /django_redis/client/mixins/__init__.py: -------------------------------------------------------------------------------- 1 | from django_redis.client.mixins.protocols import ClientProtocol 2 | from django_redis.client.mixins.sorted_sets import SortedSetMixin 3 | 4 | __all__ = ["ClientProtocol", "SortedSetMixin"] 5 | -------------------------------------------------------------------------------- /tests/README.rst: -------------------------------------------------------------------------------- 1 | Running the test suite 2 | ---------------------- 3 | 4 | .. 
code-block:: bash 5 | 6 | # start redis and a sentinel (uses docker with image redis:latest) 7 | docker compose -f docker/docker-compose.yml up -d --wait -------------------------------------------------------------------------------- /django_redis/compressors/identity.py: -------------------------------------------------------------------------------- 1 | from django_redis.compressors.base import BaseCompressor 2 | 3 | 4 | class IdentityCompressor(BaseCompressor): 5 | def compress(self, value: bytes) -> bytes: 6 | return value 7 | 8 | def decompress(self, value: bytes) -> bytes: 9 | return value 10 | -------------------------------------------------------------------------------- /django_redis/compressors/base.py: -------------------------------------------------------------------------------- 1 | class BaseCompressor: 2 | def __init__(self, options): 3 | self._options = options 4 | 5 | def compress(self, value: bytes) -> bytes: 6 | raise NotImplementedError 7 | 8 | def decompress(self, value: bytes) -> bytes: 9 | raise NotImplementedError 10 | -------------------------------------------------------------------------------- /django_redis/client/__init__.py: -------------------------------------------------------------------------------- 1 | from django_redis.client.default import DefaultClient 2 | from django_redis.client.herd import HerdClient 3 | from django_redis.client.sentinel import SentinelClient 4 | from django_redis.client.sharded import ShardClient 5 | 6 | __all__ = ["DefaultClient", "HerdClient", "SentinelClient", "ShardClient"] 7 | -------------------------------------------------------------------------------- /django_redis/serializers/base.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | 4 | class BaseSerializer: 5 | def __init__(self, options): 6 | pass 7 | 8 | def dumps(self, value: Any) -> bytes: 9 | raise NotImplementedError 10 | 11 | def loads(self, value: bytes) -> Any: 12 | 
raise NotImplementedError 13 | -------------------------------------------------------------------------------- /django_redis/util.py: -------------------------------------------------------------------------------- 1 | class CacheKey(str): 2 | """ 3 | A stub string class that we can use to check if a key was created already. 4 | """ 5 | 6 | def original_key(self) -> str: 7 | return self.rsplit(":", 1)[1] 8 | 9 | 10 | def default_reverse_key(key: str) -> str: 11 | return key.split(":", 2)[2] 12 | -------------------------------------------------------------------------------- /.github/codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | # small changes should not fail coverage 3 | precision: 1 4 | round: up 5 | 6 | status: 7 | project: 8 | default: 9 | target: auto 10 | mypy: 11 | target: auto 12 | flags: 13 | - mypy 14 | 15 | tests: 16 | target: auto 17 | flags: 18 | - tests 19 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://jazzband.co/static/img/jazzband.svg 2 | :target: https://jazzband.co/ 3 | :alt: Jazzband 4 | 5 | This is a `Jazzband `_ project. By contributing you agree 6 | to abide by the `Contributor Code of Conduct 7 | `_ and follow the `guidelines 8 | `_. 
9 | -------------------------------------------------------------------------------- /django_redis/serializers/msgpack.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | import msgpack 4 | 5 | from django_redis.serializers.base import BaseSerializer 6 | 7 | 8 | class MSGPackSerializer(BaseSerializer): 9 | def dumps(self, value: Any) -> bytes: 10 | return msgpack.dumps(value) 11 | 12 | def loads(self, value: bytes) -> Any: 13 | return msgpack.loads(value, raw=False) 14 | -------------------------------------------------------------------------------- /django_redis/exceptions.py: -------------------------------------------------------------------------------- 1 | class ConnectionInterrupted(Exception): 2 | def __init__(self, connection, parent=None): 3 | self.connection = connection 4 | 5 | def __str__(self) -> str: 6 | error_type = type(self.__cause__).__name__ 7 | error_msg = str(self.__cause__) 8 | return f"Redis {error_type}: {error_msg}" 9 | 10 | 11 | class CompressorError(Exception): 12 | pass 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[c|o] 2 | .DS_Store 3 | *.sql 4 | *.bz2 5 | *~ 6 | *.log 7 | *.json 8 | *.wsgi 9 | local_settings.py 10 | development_settings.py 11 | *.egg-info 12 | .project 13 | .pydevproject 14 | .settings 15 | versiontools* 16 | _build* 17 | doc/index.html 18 | /build/ 19 | /dist/ 20 | *.swp 21 | \#* 22 | .\#* 23 | .tox 24 | dump.rdb 25 | .idea 26 | .venv 27 | .coverage 28 | coverage.xml 29 | cobertura.xml 30 | CLAUDE.md 31 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v6.0.0 4 | hooks: 5 | - id: check-ast 6 | - 
id: check-case-conflict 7 | - id: check-docstring-first 8 | - id: check-merge-conflict 9 | - id: check-symlinks 10 | - id: debug-statements 11 | 12 | - repo: https://github.com/astral-sh/ruff-pre-commit 13 | rev: v0.12.9 14 | hooks: 15 | - id: ruff-check 16 | args: [ --fix, --exit-non-zero-on-fix , --show-fixes] 17 | - id: ruff-format 18 | -------------------------------------------------------------------------------- /django_redis/serializers/json.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Any 3 | 4 | from django.core.serializers.json import DjangoJSONEncoder 5 | 6 | from django_redis.serializers.base import BaseSerializer 7 | 8 | 9 | class JSONSerializer(BaseSerializer): 10 | encoder_class = DjangoJSONEncoder 11 | 12 | def dumps(self, value: Any) -> bytes: 13 | return json.dumps(value, cls=self.encoder_class).encode() 14 | 15 | def loads(self, value: bytes) -> Any: 16 | return json.loads(value.decode()) 17 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "github-actions" 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "daily" -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | 5 | --- 6 | 7 | **Problem Statement** 8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 9 | 10 | **Describe the solution you'd like** 11 | A clear and concise description of what you want to happen. 12 | 13 | **Describe alternatives you've considered** 14 | A clear and concise description of any alternative solutions or features you've considered. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. 
18 | -------------------------------------------------------------------------------- /django_redis/compressors/gzip.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | 3 | from django_redis.compressors.base import BaseCompressor 4 | from django_redis.exceptions import CompressorError 5 | 6 | 7 | class GzipCompressor(BaseCompressor): 8 | min_length = 15 9 | 10 | def compress(self, value: bytes) -> bytes: 11 | if len(value) > self.min_length: 12 | return gzip.compress(value) 13 | return value 14 | 15 | def decompress(self, value: bytes) -> bytes: 16 | try: 17 | return gzip.decompress(value) 18 | except gzip.BadGzipFile as e: 19 | raise CompressorError from e 20 | -------------------------------------------------------------------------------- /django_redis/compressors/zstd.py: -------------------------------------------------------------------------------- 1 | import pyzstd 2 | 3 | from django_redis.compressors.base import BaseCompressor 4 | from django_redis.exceptions import CompressorError 5 | 6 | 7 | class ZStdCompressor(BaseCompressor): 8 | min_length = 15 9 | 10 | def compress(self, value: bytes) -> bytes: 11 | if len(value) > self.min_length: 12 | return pyzstd.compress(value) 13 | return value 14 | 15 | def decompress(self, value: bytes) -> bytes: 16 | try: 17 | return pyzstd.decompress(value) 18 | except pyzstd.ZstdError as e: 19 | raise CompressorError from e 20 | -------------------------------------------------------------------------------- /django_redis/compressors/zlib.py: -------------------------------------------------------------------------------- 1 | import zlib 2 | 3 | from django_redis.compressors.base import BaseCompressor 4 | from django_redis.exceptions import CompressorError 5 | 6 | 7 | class ZlibCompressor(BaseCompressor): 8 | min_length = 15 9 | preset = 6 10 | 11 | def compress(self, value: bytes) -> bytes: 12 | if len(value) > self.min_length: 13 | return zlib.compress(value, 
self.preset) 14 | return value 15 | 16 | def decompress(self, value: bytes) -> bytes: 17 | try: 18 | return zlib.decompress(value) 19 | except zlib.error as e: 20 | raise CompressorError from e 21 | -------------------------------------------------------------------------------- /django_redis/__init__.py: -------------------------------------------------------------------------------- 1 | VERSION = (6, 0, 0) 2 | __version__ = ".".join(map(str, VERSION)) 3 | 4 | 5 | def get_redis_connection(alias="default", write=True): 6 | """ 7 | Helper used for obtaining a raw redis client. 8 | """ 9 | 10 | from django.core.cache import caches 11 | 12 | cache = caches[alias] 13 | 14 | error_message = "This backend does not support this feature" 15 | if not hasattr(cache, "client"): 16 | raise NotImplementedError(error_message) 17 | 18 | if not hasattr(cache.client, "get_client"): 19 | raise NotImplementedError(error_message) 20 | 21 | return cache.client.get_client(write) 22 | -------------------------------------------------------------------------------- /django_redis/compressors/lzma.py: -------------------------------------------------------------------------------- 1 | import lzma 2 | 3 | from django_redis.compressors.base import BaseCompressor 4 | from django_redis.exceptions import CompressorError 5 | 6 | 7 | class LzmaCompressor(BaseCompressor): 8 | min_length = 100 9 | preset = 4 10 | 11 | def compress(self, value: bytes) -> bytes: 12 | if len(value) > self.min_length: 13 | return lzma.compress(value, preset=self.preset) 14 | return value 15 | 16 | def decompress(self, value: bytes) -> bytes: 17 | try: 18 | return lzma.decompress(value) 19 | except lzma.LZMAError as e: 20 | raise CompressorError from e 21 | -------------------------------------------------------------------------------- /django_redis/compressors/lz4.py: -------------------------------------------------------------------------------- 1 | from lz4.frame import compress as _compress 2 | from lz4.frame 
import decompress as _decompress 3 | 4 | from django_redis.compressors.base import BaseCompressor 5 | from django_redis.exceptions import CompressorError 6 | 7 | 8 | class Lz4Compressor(BaseCompressor): 9 | min_length = 15 10 | 11 | def compress(self, value: bytes) -> bytes: 12 | if len(value) > self.min_length: 13 | return _compress(value) 14 | return value 15 | 16 | def decompress(self, value: bytes) -> bytes: 17 | try: 18 | return _decompress(value) 19 | except Exception as e: 20 | raise CompressorError from e 21 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.towncrier] 2 | directory = "changelog.d" 3 | filename = "CHANGELOG.rst" 4 | issue_format = "`#{issue} `_" 5 | name = "django-redis" 6 | package = "django_redis" 7 | type = [ 8 | { name = "Features", directory = "feature", showcontent = true }, 9 | { name = "Bug Fixes", directory = "bugfix", showcontent = true }, 10 | { name = "Miscellaneous", directory = "misc", showcontent = true }, 11 | { name = "Documentation", directory = "doc", showcontent = true }, 12 | { name = "Deprecations and Removals", directory = "removal", showcontent = true }, 13 | ] 14 | -------------------------------------------------------------------------------- /tests/test_connection_string.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from django_redis import pool 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "connection_string", 8 | [ 9 | "unix://tmp/foo.bar?db=1", 10 | "redis://localhost/2", 11 | "redis://redis-master/0?is_master=0", 12 | "redis://redis-master/2?is_master=False", 13 | "rediss://localhost:3333?db=2", 14 | ], 15 | ) 16 | def test_connection_strings(connection_string: str): 17 | cf = pool.get_connection_factory( 18 | path="django_redis.pool.ConnectionFactory", 19 | options={}, 20 | ) 21 | res = 
cf.make_connection_params(connection_string) 22 | assert res["url"] == connection_string 23 | -------------------------------------------------------------------------------- /RELEASING.rst: -------------------------------------------------------------------------------- 1 | Preparing a Release 2 | =================== 3 | 4 | The following steps are needed to prepare a release: 5 | 6 | 1. Make sure the VERSION in ``django_redis/__init__.py`` has been updated. 7 | 2. Run ``towncrier build`` to update the ``CHANGELOG.rst`` with the 8 | news fragments for the release. 9 | 3. Commit the changes for steps 1 and 2. 10 | 4. Tag the commit with the same version as specified for VERSION in step 1. 11 | 5. Wait for the `release action`_ to complete, which will upload the package 12 | to `django-redis jazzband`_, and when it's complete you can then release 13 | the package to PyPI. 14 | 15 | .. _release action: https://github.com/jazzband/django-redis/actions/workflows/release.yml 16 | .. _django-redis jazzband: https://jazzband.co/projects/django-redis 17 | -------------------------------------------------------------------------------- /docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | 3 | redis: 4 | image: redis:latest 5 | container_name: redis-primary 6 | command: redis-server --enable-debug-command yes --protected-mode no 7 | ports: 8 | - 6379:6379 9 | healthcheck: 10 | test: redis-cli ping 11 | interval: 5s 12 | timeout: 5s 13 | retries: 5 14 | 15 | sentinel: 16 | image: redis:latest 17 | container_name: redis-sentinel 18 | depends_on: 19 | redis: 20 | condition: service_healthy 21 | entrypoint: "redis-sentinel /redis.conf --port 26379" 22 | ports: 23 | - 26379:26379 24 | volumes: 25 | - "./sentinel.conf:/redis.conf" 26 | healthcheck: 27 | test: redis-cli -p 26379 ping 28 | interval: 5s 29 | timeout: 5s 30 | retries: 5 
-------------------------------------------------------------------------------- /tests/test_hashring.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from django_redis.hash_ring import HashRing 4 | 5 | 6 | class Node: 7 | def __init__(self, identifier): 8 | self.identifier = identifier 9 | 10 | def __str__(self): 11 | return f"node:{self.identifier}" 12 | 13 | def __repr__(self): 14 | return f"" 15 | 16 | 17 | @pytest.fixture 18 | def hash_ring(): 19 | return HashRing([Node(i) for i in range(3)]) 20 | 21 | 22 | def test_hashring(hash_ring): 23 | ids = [] 24 | 25 | for key in [f"test{x}" for x in range(10)]: 26 | node = hash_ring.get_node(key) 27 | ids.append(node.identifier) 28 | 29 | assert ids == [0, 2, 1, 2, 2, 2, 2, 0, 1, 1] 30 | 31 | 32 | def test_hashring_brute_force(hash_ring): 33 | for key in (f"test{x}" for x in range(10000)): 34 | assert hash_ring.get_node(key) 35 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | labels: bug 5 | 6 | --- 7 | 8 | **Describe the bug** 9 | A clear and concise description of what the bug is. 10 | 11 | **To Reproduce** 12 | Steps to reproduce the behavior: 13 | 1. Go to '...' 14 | 2. Click on '....' 15 | 3. Scroll down to '....' 16 | 4. See error 17 | 18 | **Expected behavior** 19 | A clear and concise description of what you expected to happen. 20 | 21 | **Stack trace** 22 | If applicable, add stack trace to help explain your problem. 23 | 24 | **Environment (please complete the following information):** 25 | - Python version: [e.g. 3.5] 26 | - Django Redis Version: [e.g. 1.9.1] 27 | - Django Version: [e.g. 1.11.11] 28 | - Redis Version: [e.g. 2.5.0] 29 | - redis-py Version: [e.g. 
3.5.0] 30 | 31 | **Additional context** 32 | Add any other context about the problem here. 33 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | Andrei Antoukh / niwibe 2 | Sean Bleier 3 | Matt Dennewitz 4 | Jannis Leidel 5 | S. Angel / Twidi 6 | Noah Kantrowitz / coderanger 7 | Martin Mahner / bartTC 8 | Timothée Peignier / cyberdelia 9 | Lior Sion / liorsion 10 | Ales Zoulek / aleszoulek 11 | James Aylett / jaylett 12 | Todd Boland / boland 13 | David Zderic / dzderic 14 | Kirill Zaitsev / teferi 15 | Jon Dufresne 16 | Anès Foufa 17 | Segyo Myung -------------------------------------------------------------------------------- /django_redis/client/mixins/protocols.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Optional, Protocol, Union 2 | 3 | from redis import Redis 4 | from redis.typing import KeyT 5 | 6 | 7 | class ClientProtocol(Protocol): 8 | """ 9 | Protocol for client methods required by mixins. 10 | 11 | Any class using django-redis mixins must implement these methods. 12 | """ 13 | 14 | def make_key( 15 | self, 16 | key: KeyT, 17 | version: Optional[int] = None, 18 | prefix: Optional[str] = None, 19 | ) -> KeyT: 20 | """Create a cache key with optional version and prefix.""" 21 | ... 22 | 23 | def encode(self, value: Any) -> Union[bytes, int]: 24 | """Encode a value for storage in Redis.""" 25 | ... 26 | 27 | def decode(self, value: Union[bytes, int]) -> Any: 28 | """Decode a value retrieved from Redis.""" 29 | ... 30 | 31 | def get_client(self, write: bool = False) -> Redis: 32 | """Get a Redis client instance for read or write operations.""" 33 | ... 
34 | -------------------------------------------------------------------------------- /tests/test_serializers.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | 3 | import pytest 4 | from django.core.exceptions import ImproperlyConfigured 5 | 6 | from django_redis.serializers.pickle import PickleSerializer 7 | 8 | 9 | class TestPickleSerializer: 10 | def test_invalid_pickle_version_provided(self): 11 | with pytest.raises( 12 | ImproperlyConfigured, 13 | match="PICKLE_VERSION value must be an integer", 14 | ): 15 | PickleSerializer({"PICKLE_VERSION": "not-an-integer"}) 16 | 17 | def test_setup_pickle_version_not_explicitly_specified(self): 18 | serializer = PickleSerializer({}) 19 | assert serializer._pickle_version == pickle.DEFAULT_PROTOCOL 20 | 21 | def test_setup_pickle_version_too_high(self): 22 | with pytest.raises( 23 | ImproperlyConfigured, 24 | match=f"PICKLE_VERSION can't be higher than pickle.HIGHEST_PROTOCOL:" 25 | f" {pickle.HIGHEST_PROTOCOL}", 26 | ): 27 | PickleSerializer({"PICKLE_VERSION": pickle.HIGHEST_PROTOCOL + 1}) 28 | -------------------------------------------------------------------------------- /tests/settings/sqlite_herd.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=3"], 7 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, 8 | }, 9 | "doesnotexist": { 10 | "BACKEND": "django_redis.cache.RedisCache", 11 | "LOCATION": "redis://127.0.0.1:56379?db=3", 12 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, 13 | }, 14 | "sample": { 15 | "BACKEND": "django_redis.cache.RedisCache", 16 | "LOCATION": "redis://127.0.0.1:6379?db=3,redis://127.0.0.1:6379?db=3", 17 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, 18 | }, 19 | "with_prefix": { 20 | 
"BACKEND": "django_redis.cache.RedisCache", 21 | "LOCATION": "redis://127.0.0.1:6379?db=3", 22 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.HerdClient"}, 23 | "KEY_PREFIX": "test-prefix", 24 | }, 25 | } 26 | 27 | INSTALLED_APPS = ["django.contrib.sessions"] 28 | 29 | USE_TZ = False 30 | 31 | CACHE_HERD_TIMEOUT = 2 32 | -------------------------------------------------------------------------------- /tests/settings/sqlite_usock.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["unix:///tmp/redis.sock?db=11", "unix:///tmp/redis.sock?db=11"], 7 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 8 | }, 9 | "doesnotexist": { 10 | "BACKEND": "django_redis.cache.RedisCache", 11 | "LOCATION": "redis://127.0.0.1:56379?db=11", 12 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 13 | }, 14 | "sample": { 15 | "BACKEND": "django_redis.cache.RedisCache", 16 | "LOCATION": "redis://127.0.0.1:6379?db=11,redis://127.0.0.1:6379?db=11", 17 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 18 | }, 19 | "with_prefix": { 20 | "BACKEND": "django_redis.cache.RedisCache", 21 | "LOCATION": "redis://127.0.0.1:6379?db=11", 22 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 23 | "KEY_PREFIX": "test-prefix", 24 | }, 25 | } 26 | 27 | INSTALLED_APPS = ["django.contrib.sessions"] 28 | 29 | USE_TZ = False 30 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | jobs: 9 | build: 10 | if: github.repository == 'jazzband/django-redis' 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v5 15 | with: 16 | fetch-depth: 0 
17 | 18 | - name: Set up Python 19 | uses: actions/setup-python@v6 20 | with: 21 | python-version: 3.9 22 | 23 | - name: Install dependencies 24 | run: | 25 | python -m pip install -U pip 26 | python -m pip install -U setuptools twine wheel 27 | 28 | - name: Build package 29 | run: | 30 | python setup.py --version 31 | python setup.py sdist --format=gztar bdist_wheel 32 | twine check dist/* 33 | 34 | - name: Upload packages to Jazzband 35 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 36 | uses: pypa/gh-action-pypi-publish@master 37 | with: 38 | user: jazzband 39 | password: ${{ secrets.JAZZBAND_RELEASE_KEY }} 40 | repository_url: https://jazzband.co/projects/django-redis/upload 41 | -------------------------------------------------------------------------------- /tests/settings/sqlite_sharding.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=9", "redis://127.0.0.1:6379?db=10"], 7 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, 8 | }, 9 | "doesnotexist": { 10 | "BACKEND": "django_redis.cache.RedisCache", 11 | "LOCATION": ["redis://127.0.0.1:56379?db=9", "redis://127.0.0.1:56379?db=10"], 12 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, 13 | }, 14 | "sample": { 15 | "BACKEND": "django_redis.cache.RedisCache", 16 | "LOCATION": "redis://127.0.0.1:6379?db=9,redis://127.0.0.1:6379?db=9", 17 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, 18 | }, 19 | "with_prefix": { 20 | "BACKEND": "django_redis.cache.RedisCache", 21 | "LOCATION": "redis://127.0.0.1:6379?db=9", 22 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.ShardClient"}, 23 | "KEY_PREFIX": "test-prefix", 24 | }, 25 | } 26 | 27 | INSTALLED_APPS = ["django.contrib.sessions"] 28 | 29 | USE_TZ = False 30 | 
-------------------------------------------------------------------------------- /tests/settings_wrapper.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | 3 | if TYPE_CHECKING: 4 | from django.test import override_settings 5 | 6 | 7 | class SettingsWrapper: 8 | def __init__(self) -> None: 9 | self._to_restore: list[override_settings] 10 | object.__setattr__(self, "_to_restore", []) 11 | 12 | def __delattr__(self, attr: str) -> None: 13 | from django.test import override_settings 14 | 15 | override = override_settings() 16 | override.enable() 17 | from django.conf import settings 18 | 19 | delattr(settings, attr) 20 | 21 | self._to_restore.append(override) 22 | 23 | def __setattr__(self, attr: str, value) -> None: 24 | from django.test import override_settings 25 | 26 | override = override_settings(**{attr: value}) 27 | override.enable() 28 | self._to_restore.append(override) 29 | 30 | def __getattr__(self, attr: str): 31 | from django.conf import settings 32 | 33 | return getattr(settings, attr) 34 | 35 | def finalize(self) -> None: 36 | for override in reversed(self._to_restore): 37 | override.disable() 38 | 39 | del self._to_restore[:] 40 | -------------------------------------------------------------------------------- /django_redis/serializers/pickle.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from typing import Any 3 | 4 | from django.core.exceptions import ImproperlyConfigured 5 | 6 | from django_redis.serializers.base import BaseSerializer 7 | 8 | 9 | class PickleSerializer(BaseSerializer): 10 | def __init__(self, options) -> None: 11 | self._pickle_version = pickle.DEFAULT_PROTOCOL 12 | self.setup_pickle_version(options) 13 | 14 | super().__init__(options=options) 15 | 16 | def setup_pickle_version(self, options) -> None: 17 | if "PICKLE_VERSION" in options: 18 | try: 19 | self._pickle_version = 
int(options["PICKLE_VERSION"]) 20 | if self._pickle_version > pickle.HIGHEST_PROTOCOL: 21 | error_message = ( 22 | f"PICKLE_VERSION can't be higher than pickle.HIGHEST_PROTOCOL:" 23 | f" {pickle.HIGHEST_PROTOCOL}" 24 | ) 25 | raise ImproperlyConfigured(error_message) 26 | except (ValueError, TypeError) as e: 27 | error_message = "PICKLE_VERSION value must be an integer" 28 | raise ImproperlyConfigured(error_message) from e 29 | 30 | def dumps(self, value: Any) -> bytes: 31 | return pickle.dumps(value, self._pickle_version) 32 | 33 | def loads(self, value: bytes) -> Any: 34 | return pickle.loads(value) 35 | -------------------------------------------------------------------------------- /tests/settings/sqlite.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | CACHES = { 3 | "default": { 4 | "BACKEND": "django_redis.cache.RedisCache", 5 | "LOCATION": ["redis://127.0.0.1:6379?db=1", "redis://127.0.0.1:6379?db=1"], 6 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 7 | }, 8 | "doesnotexist": { 9 | "BACKEND": "django_redis.cache.RedisCache", 10 | "LOCATION": "redis://127.0.0.1:56379?db=1", 11 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 12 | }, 13 | "sample": { 14 | "BACKEND": "django_redis.cache.RedisCache", 15 | "LOCATION": "redis://127.0.0.1:6379:1,redis://127.0.0.1:6379:1", 16 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 17 | }, 18 | "with_prefix": { 19 | "BACKEND": "django_redis.cache.RedisCache", 20 | "LOCATION": "redis://127.0.0.1:6379?db=1", 21 | "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"}, 22 | "KEY_PREFIX": "test-prefix", 23 | }, 24 | } 25 | 26 | # Include `django.contrib.auth` and `django.contrib.contenttypes` for mypy / 27 | # django-stubs. 
28 | 29 | # See: 30 | # - https://github.com/typeddjango/django-stubs/issues/318 31 | # - https://github.com/typeddjango/django-stubs/issues/534 32 | INSTALLED_APPS = [ 33 | "django.contrib.auth", 34 | "django.contrib.contenttypes", 35 | "django.contrib.sessions", 36 | ] 37 | 38 | USE_TZ = False 39 | -------------------------------------------------------------------------------- /tests/settings/sqlite_sentinel.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | DJANGO_REDIS_CONNECTION_FACTORY = "django_redis.pool.SentinelConnectionFactory" 4 | 5 | SENTINELS = [("127.0.0.1", 26379)] 6 | 7 | CACHES = { 8 | "default": { 9 | "BACKEND": "django_redis.cache.RedisCache", 10 | "LOCATION": ["redis://127.0.0.1?db=7"], 11 | "OPTIONS": { 12 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 13 | "SENTINELS": SENTINELS, 14 | }, 15 | }, 16 | "doesnotexist": { 17 | "BACKEND": "django_redis.cache.RedisCache", 18 | "LOCATION": "redis://missing_service?db=7", 19 | "OPTIONS": { 20 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 21 | "SENTINELS": SENTINELS, 22 | }, 23 | }, 24 | "sample": { 25 | "BACKEND": "django_redis.cache.RedisCache", 26 | "LOCATION": "redis://127.0.0.1?db=7", 27 | "OPTIONS": { 28 | "CLIENT_CLASS": "django_redis.client.SentinelClient", 29 | "SENTINELS": SENTINELS, 30 | }, 31 | }, 32 | "with_prefix": { 33 | "BACKEND": "django_redis.cache.RedisCache", 34 | "LOCATION": "redis://127.0.0.1?db=7", 35 | "KEY_PREFIX": "test-prefix", 36 | "OPTIONS": { 37 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 38 | "SENTINELS": SENTINELS, 39 | }, 40 | }, 41 | } 42 | 43 | INSTALLED_APPS = ["django.contrib.sessions"] 44 | 45 | USE_TZ = False 46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011-2016 Andrey Antukh 2 | Copyright (c) 2011 Sean 
Bleier 3 | 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions 8 | are met: 9 | 1. Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 3. The name of the author may not be used to endorse or promote products 15 | derived from this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS`` AND ANY EXPRESS OR 18 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 19 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 20 | IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 21 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 22 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 26 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27 | -------------------------------------------------------------------------------- /tests/settings/sqlite_lz4.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=5", "redis://127.0.0.1:6379?db=5"], 7 | "OPTIONS": { 8 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 9 | "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", 10 | }, 11 | }, 12 | "doesnotexist": { 13 | "BACKEND": "django_redis.cache.RedisCache", 14 | "LOCATION": "redis://127.0.0.1:56379?db=5", 15 | "OPTIONS": { 16 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 17 | "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", 18 | }, 19 | }, 20 | "sample": { 21 | "BACKEND": "django_redis.cache.RedisCache", 22 | "LOCATION": "127.0.0.1:6379?db=5,127.0.0.1:6379?db=5", 23 | "OPTIONS": { 24 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 25 | "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", 26 | }, 27 | }, 28 | "with_prefix": { 29 | "BACKEND": "django_redis.cache.RedisCache", 30 | "LOCATION": "redis://127.0.0.1:6379?db=5", 31 | "OPTIONS": { 32 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 33 | "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", 34 | }, 35 | "KEY_PREFIX": "test-prefix", 36 | }, 37 | } 38 | 39 | INSTALLED_APPS = ["django.contrib.sessions"] 40 | 41 | USE_TZ = False 42 | -------------------------------------------------------------------------------- /tests/settings/sqlite_gzip.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=2", "redis://127.0.0.1:6379?db=2"], 7 | "OPTIONS": { 8 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 9 
| "COMPRESSOR": "django_redis.compressors.gzip.GzipCompressor", 10 | }, 11 | }, 12 | "doesnotexist": { 13 | "BACKEND": "django_redis.cache.RedisCache", 14 | "LOCATION": "redis://127.0.0.1:56379?db=2", 15 | "OPTIONS": { 16 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 17 | "COMPRESSOR": "django_redis.compressors.gzip.GzipCompressor", 18 | }, 19 | }, 20 | "sample": { 21 | "BACKEND": "django_redis.cache.RedisCache", 22 | "LOCATION": "redis://127.0.0.1:6379?db=2,redis://127.0.0.1:6379?db=2", 23 | "OPTIONS": { 24 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 25 | "COMPRESSOR": "django_redis.compressors.gzip.GzipCompressor", 26 | }, 27 | }, 28 | "with_prefix": { 29 | "BACKEND": "django_redis.cache.RedisCache", 30 | "LOCATION": "redis://127.0.0.1:6379?db=2", 31 | "OPTIONS": { 32 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 33 | "COMPRESSOR": "django_redis.compressors.gzip.GzipCompressor", 34 | }, 35 | "KEY_PREFIX": "test-prefix", 36 | }, 37 | } 38 | 39 | INSTALLED_APPS = ["django.contrib.sessions"] 40 | 41 | USE_TZ = False 42 | -------------------------------------------------------------------------------- /tests/settings/sqlite_json.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=4", "redis://127.0.0.1:6379?db=4"], 7 | "OPTIONS": { 8 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 9 | "SERIALIZER": "django_redis.serializers.json.JSONSerializer", 10 | }, 11 | }, 12 | "doesnotexist": { 13 | "BACKEND": "django_redis.cache.RedisCache", 14 | "LOCATION": "redis://127.0.0.1:56379?db=4", 15 | "OPTIONS": { 16 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 17 | "SERIALIZER": "django_redis.serializers.json.JSONSerializer", 18 | }, 19 | }, 20 | "sample": { 21 | "BACKEND": "django_redis.cache.RedisCache", 22 | "LOCATION": 
"redis://127.0.0.1:6379?db=4,redis://127.0.0.1:6379?db=4", 23 | "OPTIONS": { 24 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 25 | "SERIALIZER": "django_redis.serializers.json.JSONSerializer", 26 | }, 27 | }, 28 | "with_prefix": { 29 | "BACKEND": "django_redis.cache.RedisCache", 30 | "LOCATION": "redis://127.0.0.1:6379?db=4", 31 | "OPTIONS": { 32 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 33 | "SERIALIZER": "django_redis.serializers.json.JSONSerializer", 34 | }, 35 | "KEY_PREFIX": "test-prefix", 36 | }, 37 | } 38 | 39 | INSTALLED_APPS = ["django.contrib.sessions"] 40 | 41 | USE_TZ = False 42 | -------------------------------------------------------------------------------- /tests/settings/sqlite_zlib.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=12", "redis://127.0.0.1:6379?db=12"], 7 | "OPTIONS": { 8 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 9 | "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", 10 | }, 11 | }, 12 | "doesnotexist": { 13 | "BACKEND": "django_redis.cache.RedisCache", 14 | "LOCATION": "redis://127.0.0.1:56379?db=12", 15 | "OPTIONS": { 16 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 17 | "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", 18 | }, 19 | }, 20 | "sample": { 21 | "BACKEND": "django_redis.cache.RedisCache", 22 | "LOCATION": "redis://127.0.0.1:6379?db=12,redis://127.0.0.1:6379?db=12", 23 | "OPTIONS": { 24 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 25 | "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", 26 | }, 27 | }, 28 | "with_prefix": { 29 | "BACKEND": "django_redis.cache.RedisCache", 30 | "LOCATION": "redis://127.0.0.1:6379?db=12", 31 | "OPTIONS": { 32 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 33 | "COMPRESSOR": 
"django_redis.compressors.zlib.ZlibCompressor", 34 | }, 35 | "KEY_PREFIX": "test-prefix", 36 | }, 37 | } 38 | 39 | INSTALLED_APPS = ["django.contrib.sessions"] 40 | 41 | USE_TZ = False 42 | -------------------------------------------------------------------------------- /tests/settings/sqlite_zstd.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 | "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=13", "redis://127.0.0.1:6379?db=13"], 7 | "OPTIONS": { 8 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 9 | "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", 10 | }, 11 | }, 12 | "doesnotexist": { 13 | "BACKEND": "django_redis.cache.RedisCache", 14 | "LOCATION": "redis://127.0.0.1:56379?db=13", 15 | "OPTIONS": { 16 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 17 | "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", 18 | }, 19 | }, 20 | "sample": { 21 | "BACKEND": "django_redis.cache.RedisCache", 22 | "LOCATION": "redis://127.0.0.1:6379?db=13,redis://127.0.0.1:6379?db=13", 23 | "OPTIONS": { 24 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 25 | "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", 26 | }, 27 | }, 28 | "with_prefix": { 29 | "BACKEND": "django_redis.cache.RedisCache", 30 | "LOCATION": "redis://127.0.0.1:6379?db=13", 31 | "OPTIONS": { 32 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 33 | "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", 34 | }, 35 | "KEY_PREFIX": "test-prefix", 36 | }, 37 | } 38 | 39 | INSTALLED_APPS = ["django.contrib.sessions"] 40 | 41 | USE_TZ = False 42 | -------------------------------------------------------------------------------- /tests/settings/sqlite_msgpack.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | CACHES = { 4 
| "default": { 5 | "BACKEND": "django_redis.cache.RedisCache", 6 | "LOCATION": ["redis://127.0.0.1:6379?db=6", "redis://127.0.0.1:6379?db=6"], 7 | "OPTIONS": { 8 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 9 | "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", 10 | }, 11 | }, 12 | "doesnotexist": { 13 | "BACKEND": "django_redis.cache.RedisCache", 14 | "LOCATION": "redis://127.0.0.1:56379?db=6", 15 | "OPTIONS": { 16 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 17 | "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", 18 | }, 19 | }, 20 | "sample": { 21 | "BACKEND": "django_redis.cache.RedisCache", 22 | "LOCATION": "redis://127.0.0.1:6379?db=6,redis://127.0.0.1:6379?db=6", 23 | "OPTIONS": { 24 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 25 | "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", 26 | }, 27 | }, 28 | "with_prefix": { 29 | "BACKEND": "django_redis.cache.RedisCache", 30 | "LOCATION": "redis://127.0.0.1:6379?db=6", 31 | "OPTIONS": { 32 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 33 | "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", 34 | }, 35 | "KEY_PREFIX": "test-prefix", 36 | }, 37 | } 38 | 39 | INSTALLED_APPS = ["django.contrib.sessions"] 40 | 41 | USE_TZ = False 42 | -------------------------------------------------------------------------------- /tests/settings/sqlite_sentinel_opts.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = "django_tests_secret_key" 2 | 3 | SENTINELS = [("127.0.0.1", 26379)] 4 | 5 | conn_factory = "django_redis.pool.SentinelConnectionFactory" 6 | 7 | CACHES = { 8 | "default": { 9 | "BACKEND": "django_redis.cache.RedisCache", 10 | "LOCATION": ["redis://default_service?db=8"], 11 | "OPTIONS": { 12 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 13 | "SENTINELS": SENTINELS, 14 | "CONNECTION_FACTORY": conn_factory, 15 | }, 16 | }, 17 | "doesnotexist": { 18 | 
"BACKEND": "django_redis.cache.RedisCache", 19 | "LOCATION": "redis://missing_service?db=8", 20 | "OPTIONS": { 21 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 22 | "SENTINELS": SENTINELS, 23 | "CONNECTION_FACTORY": conn_factory, 24 | }, 25 | }, 26 | "sample": { 27 | "BACKEND": "django_redis.cache.RedisCache", 28 | "LOCATION": "redis://default_service?db=8", 29 | "OPTIONS": { 30 | "CLIENT_CLASS": "django_redis.client.SentinelClient", 31 | "SENTINELS": SENTINELS, 32 | "CONNECTION_FACTORY": conn_factory, 33 | }, 34 | }, 35 | "with_prefix": { 36 | "BACKEND": "django_redis.cache.RedisCache", 37 | "LOCATION": "redis://default_service?db=8", 38 | "KEY_PREFIX": "test-prefix", 39 | "OPTIONS": { 40 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 41 | "SENTINELS": SENTINELS, 42 | "CONNECTION_FACTORY": conn_factory, 43 | }, 44 | }, 45 | } 46 | 47 | INSTALLED_APPS = ["django.contrib.sessions"] 48 | 49 | USE_TZ = False 50 | -------------------------------------------------------------------------------- /django_redis/client/sentinel.py: -------------------------------------------------------------------------------- 1 | from urllib.parse import parse_qs, urlencode, urlparse, urlunparse 2 | 3 | from django.core.exceptions import ImproperlyConfigured 4 | from redis.sentinel import SentinelConnectionPool 5 | 6 | from django_redis.client.default import DefaultClient 7 | 8 | 9 | def replace_query(url, query): 10 | return urlunparse((*url[:4], urlencode(query, doseq=True), url[5])) 11 | 12 | 13 | class SentinelClient(DefaultClient): 14 | """ 15 | Sentinel client which uses the single redis URL specified by the CACHE's 16 | LOCATION to create a LOCATION configuration for two connection pools; One 17 | pool for the primaries and another pool for the replicas, and upon 18 | connecting ensures the connection pool factory is configured correctly. 
19 | """ 20 | 21 | def __init__(self, server, params, backend): 22 | if isinstance(server, str): 23 | url = urlparse(server) 24 | primary_query = parse_qs(url.query, keep_blank_values=True) 25 | replica_query = dict(primary_query) 26 | primary_query["is_master"] = [1] 27 | replica_query["is_master"] = [0] 28 | 29 | server = [replace_query(url, i) for i in (primary_query, replica_query)] 30 | 31 | super().__init__(server, params, backend) 32 | 33 | def connect(self, *args, **kwargs): 34 | connection = super().connect(*args, **kwargs) 35 | if not isinstance(connection.connection_pool, SentinelConnectionPool): 36 | error_message = ( 37 | "Settings DJANGO_REDIS_CONNECTION_FACTORY or " 38 | "CACHE[].OPTIONS.CONNECTION_POOL_CLASS is not configured correctly." 39 | ) 40 | raise ImproperlyConfigured(error_message) 41 | 42 | return connection 43 | -------------------------------------------------------------------------------- /tests/test_connection_factory.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from django.core.exceptions import ImproperlyConfigured 3 | 4 | from django_redis import pool 5 | 6 | 7 | def test_connection_factory_redefine_from_opts(): 8 | cf = pool.get_connection_factory( 9 | path="django_redis.pool.ConnectionFactory", 10 | options={ 11 | "CONNECTION_FACTORY": "django_redis.pool.SentinelConnectionFactory", 12 | "SENTINELS": [("127.0.0.1", "26739")], 13 | }, 14 | ) 15 | assert cf.__class__.__name__ == "SentinelConnectionFactory" 16 | 17 | 18 | @pytest.mark.parametrize( 19 | "conn_factory,expected", 20 | [ 21 | ("django_redis.pool.SentinelConnectionFactory", pool.SentinelConnectionFactory), 22 | ("django_redis.pool.ConnectionFactory", pool.ConnectionFactory), 23 | ], 24 | ) 25 | def test_connection_factory_opts(conn_factory: str, expected): 26 | cf = pool.get_connection_factory( 27 | path=None, 28 | options={ 29 | "CONNECTION_FACTORY": conn_factory, 30 | "SENTINELS": [("127.0.0.1", "26739")], 31 
def test_connection_factory_no_sentinels():
    """A sentinel connection factory without SENTINELS must be rejected."""
    options = {"CONNECTION_FACTORY": "django_redis.pool.SentinelConnectionFactory"}
    with pytest.raises(ImproperlyConfigured):
        pool.get_connection_factory(path=None, options=options)
This is very hackish and might blow up any time!""" 15 | 16 | def _split_scope(self, nodeid): 17 | if "[sqlite" in nodeid: 18 | return nodeid.rsplit("[")[-1].replace("]", "") 19 | return None 20 | 21 | 22 | def pytest_xdist_make_scheduler(log, config): 23 | return FixtureScheduling(config, log) 24 | 25 | 26 | def pytest_configure(config): 27 | sys.path.insert(0, str(Path(__file__).absolute().parent)) 28 | 29 | 30 | @pytest.fixture() 31 | def settings(): 32 | """A Django settings object which restores changes after the testrun""" 33 | wrapper = SettingsWrapper() 34 | yield wrapper 35 | wrapper.finalize() 36 | 37 | 38 | @pytest.fixture() 39 | def cache(cache_settings: str) -> Iterable[BaseCache]: 40 | from django import setup 41 | 42 | environ["DJANGO_SETTINGS_MODULE"] = f"settings.{cache_settings}" 43 | setup() 44 | 45 | from django.core.cache import cache as default_cache 46 | 47 | yield default_cache 48 | default_cache.clear() 49 | 50 | 51 | def pytest_generate_tests(metafunc): 52 | if "cache" in metafunc.fixturenames or "session" in metafunc.fixturenames: 53 | # Mark 54 | settings = [ 55 | "sqlite", 56 | "sqlite_gzip", 57 | "sqlite_herd", 58 | "sqlite_json", 59 | "sqlite_lz4", 60 | "sqlite_msgpack", 61 | "sqlite_sentinel", 62 | "sqlite_sentinel_opts", 63 | "sqlite_sharding", 64 | "sqlite_usock", 65 | "sqlite_zlib", 66 | "sqlite_zstd", 67 | ] 68 | metafunc.parametrize("cache_settings", settings) 69 | -------------------------------------------------------------------------------- /django_redis/hash_ring.py: -------------------------------------------------------------------------------- 1 | import bisect 2 | import hashlib 3 | from collections.abc import Iterable, Iterator 4 | from typing import Optional 5 | 6 | 7 | class HashRing: 8 | nodes: list[str] = [] 9 | 10 | def __init__(self, nodes: Iterable[str] = (), replicas: int = 128) -> None: 11 | self.replicas: int = replicas 12 | self.ring: dict[str, str] = {} 13 | self.sorted_keys: list[str] = [] 14 | 15 | for 
node in nodes: 16 | self.add_node(node) 17 | 18 | def add_node(self, node: str) -> None: 19 | self.nodes.append(node) 20 | 21 | for x in range(self.replicas): 22 | _key = f"{node}:{x}" 23 | _hash = hashlib.sha256(_key.encode()).hexdigest() 24 | 25 | self.ring[_hash] = node 26 | self.sorted_keys.append(_hash) 27 | 28 | self.sorted_keys.sort() 29 | 30 | def remove_node(self, node: str) -> None: 31 | self.nodes.remove(node) 32 | for x in range(self.replicas): 33 | _hash = hashlib.sha256(f"{node}:{x}".encode()).hexdigest() 34 | del self.ring[_hash] 35 | self.sorted_keys.remove(_hash) 36 | 37 | def get_node(self, key: str) -> Optional[str]: 38 | n, i = self.get_node_pos(key) 39 | return n 40 | 41 | def get_node_pos(self, key: str) -> tuple[Optional[str], Optional[int]]: 42 | if len(self.ring) == 0: 43 | return None, None 44 | 45 | _hash = hashlib.sha256(key.encode()).hexdigest() 46 | idx = bisect.bisect(self.sorted_keys, _hash) 47 | idx = min(idx - 1, (self.replicas * len(self.nodes)) - 1) 48 | return self.ring[self.sorted_keys[idx]], idx 49 | 50 | def iter_nodes(self, key: str) -> Iterator[tuple[Optional[str], Optional[str]]]: 51 | if len(self.ring) == 0: 52 | yield None, None 53 | 54 | node, pos = self.get_node_pos(key) 55 | for k in self.sorted_keys[pos:]: 56 | yield k, self.ring[k] 57 | 58 | def __call__(self, key: str) -> Optional[str]: 59 | return self.get_node(key) 60 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | As contributors and maintainers of the Jazzband projects, and in the interest of 4 | fostering an open and welcoming community, we pledge to respect all people who 5 | contribute through reporting issues, posting feature requests, updating documentation, 6 | submitting pull requests or patches, and other activities. 
7 | 8 | We are committed to making participation in the Jazzband a harassment-free experience 9 | for everyone, regardless of the level of experience, gender, gender identity and 10 | expression, sexual orientation, disability, personal appearance, body size, race, 11 | ethnicity, age, religion, or nationality. 12 | 13 | Examples of unacceptable behavior by participants include: 14 | 15 | - The use of sexualized language or imagery 16 | - Personal attacks 17 | - Trolling or insulting/derogatory comments 18 | - Public or private harassment 19 | - Publishing other's private information, such as physical or electronic addresses, 20 | without explicit permission 21 | - Other unethical or unprofessional conduct 22 | 23 | The Jazzband roadies have the right and responsibility to remove, edit, or reject 24 | comments, commits, code, wiki edits, issues, and other contributions that are not 25 | aligned to this Code of Conduct, or to ban temporarily or permanently any contributor 26 | for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 27 | 28 | By adopting this Code of Conduct, the roadies commit themselves to fairly and 29 | consistently applying these principles to every aspect of managing the jazzband 30 | projects. Roadies who do not follow or enforce the Code of Conduct may be permanently 31 | removed from the Jazzband roadies. 32 | 33 | This code of conduct applies both within project spaces and in public spaces when an 34 | individual is representing the project or its community. 35 | 36 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 37 | contacting the roadies at `roadies@jazzband.co`. All complaints will be reviewed and 38 | investigated and will result in a response that is deemed necessary and appropriate to 39 | the circumstances. Roadies are obligated to maintain confidentiality with regard to the 40 | reporter of an incident. 
41 | 42 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 43 | 1.3.0, available at [https://contributor-covenant.org/version/1/3/0/][version] 44 | 45 | [homepage]: https://contributor-covenant.org 46 | [version]: https://contributor-covenant.org/version/1/3/0/ 47 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = django-redis 3 | version = attr: django_redis.__version__ 4 | url = https://github.com/jazzband/django-redis 5 | author = Andrei Antoukh 6 | author_email = niwi@niwi.nz 7 | description = Full featured redis cache backend for Django. 8 | long_description = file: README.rst 9 | long_description_content_type = text/x-rst 10 | license = BSD-3-Clause 11 | classifiers = 12 | Development Status :: 5 - Production/Stable 13 | Environment :: Web Environment 14 | Framework :: Django 15 | Framework :: Django :: 4.2 16 | Framework :: Django :: 5.1 17 | Framework :: Django :: 5.2 18 | Intended Audience :: Developers 19 | License :: OSI Approved :: BSD License 20 | Operating System :: OS Independent 21 | Programming Language :: Python 22 | Programming Language :: Python :: 3 23 | Programming Language :: Python :: 3 :: Only 24 | Programming Language :: Python :: 3.9 25 | Programming Language :: Python :: 3.10 26 | Programming Language :: Python :: 3.11 27 | Programming Language :: Python :: 3.12 28 | Programming Language :: Python :: 3.13 29 | Topic :: Software Development :: Libraries 30 | Topic :: Utilities 31 | 32 | [options] 33 | python_requires = >=3.9 34 | packages = 35 | django_redis 36 | django_redis.client 37 | django_redis.client.mixins 38 | django_redis.serializers 39 | django_redis.compressors 40 | install_requires = 41 | Django>=4.2,<5.3,!=5.0.* 42 | redis>=4.0.2 43 | 44 | [options.extras_require] 45 | hiredis = redis[hiredis]>=4.0.2 46 | 47 | [coverage:run] 48 | omit = 49 | 
tests/*, 50 | 51 | [coverage:report] 52 | precision = 1 53 | skip_covered = true 54 | 55 | [tox:tox] 56 | minversion = 3.15.0 57 | envlist = 58 | ruff 59 | mypy 60 | # tests against released versions 61 | py39-dj{42}-redislatest 62 | py{310,311,312}-dj{42,50,51,52}-redislatest 63 | py313-dj{51,52}-redislatest 64 | # tests against unreleased versions 65 | py313-dj52-redismaster 66 | py313-djmain-redis{latest,master} 67 | 68 | [gh-actions] 69 | python = 70 | 3.9: py39, ruff, mypy 71 | 3.10: py310 72 | 3.11: py311 73 | 3.12: py312 74 | 3.13: py313 75 | 76 | [gh-actions:env] 77 | DJANGO = 78 | 4.2: dj42 79 | 5.1: dj51 80 | 5.2: dj52 81 | main: djmain 82 | REDIS = 83 | latest: redislatest 84 | master: redismaster 85 | 86 | [testenv] 87 | passenv = CI, GITHUB* 88 | commands = 89 | {envpython} -m pytest -n 4 {posargs} 90 | 91 | deps = 92 | dj42: Django>=4.2,<5.0 93 | dj51: Django>=5.1,<5.2 94 | dj52: Django>=5.2,<6.0 95 | djmain: https://github.com/django/django/archive/main.tar.gz 96 | msgpack>=0.6.0 97 | pytest 98 | pytest-cov 99 | pytest-mock 100 | pytest-pythonpath 101 | pytest-xdist 102 | redismaster: https://github.com/redis/redis-py/archive/master.tar.gz 103 | lz4>=0.15 104 | pyzstd>=0.15 105 | 106 | [testenv:{pre-commit,mypy}] 107 | basepython = python3 108 | envdir={toxworkdir}/lint 109 | commands = 110 | pre-commit: pre-commit run --all-files 111 | mypy: mypy {posargs:--cobertura-xml-report .} django_redis tests 112 | deps = 113 | django-stubs 114 | lxml 115 | pre-commit 116 | mypy 117 | # typing dependencies 118 | pytest 119 | pytest-mock 120 | types-redis 121 | skip_install = true 122 | 123 | [tool:pytest] 124 | addopts = 125 | --doctest-modules 126 | --cov=django_redis 127 | --cov-config=setup.cfg 128 | --no-cov-on-fail 129 | pythonpath = tests 130 | testpaths = tests 131 | xfail_strict = true 132 | 133 | [mypy] 134 | plugins = 135 | mypy_django_plugin.main 136 | pretty = true 137 | show_error_codes = true 138 | show_error_context = true 139 | 
def make_key(key: str, prefix: str, version: str) -> str:
    """Build a cache key in the ``prefix#version#key`` layout."""
    return "#".join((prefix, version, key))


def reverse_key(key: str) -> str:
    """Strip the ``prefix#version#`` header added by :func:`make_key`."""
    _prefix, _version, bare_key = key.split("#", 2)
    return bare_key
def test_get_django_omit_exceptions(
    caplog: LogCaptureFixture,
    ignore_exceptions_cache: RedisCache,
):
    """Backend errors are swallowed and logged when both IGNORE_EXCEPTIONS
    and LOG_IGNORED_EXCEPTIONS are enabled on the cache."""
    cache = ignore_exceptions_cache
    assert cache._ignore_exceptions is True
    assert cache._log_ignored_exceptions is True

    # Each failing get() falls back to the (implicit or explicit) default.
    assert cache.get("key") is None
    assert cache.get("key", "default") == "default"
    assert cache.get("key", default="default") == "default"

    # One log record per swallowed exception, all at ERROR level.
    assert len(caplog.records) == 3
    for record in caplog.records:
        assert record.levelname == "ERROR"
        assert record.msg == "Exception ignored"
class TestDjangoRedisCacheEscapePrefix:
    """A literal ``*`` in KEY_PREFIX must be escaped so pattern-based
    operations never touch keys belonging to other prefixed caches."""

    def test_delete_pattern(
        self,
        key_prefix_cache: RedisCache,
        with_prefix_cache: RedisCache,
    ):
        key_prefix_cache.set("a", "1")
        with_prefix_cache.set("b", "2")

        key_prefix_cache.delete_pattern("*")

        # Only the '*'-prefixed cache lost its key.
        assert key_prefix_cache.has_key("a") is False
        assert with_prefix_cache.get("b") == "2"

    def test_iter_keys(
        self,
        key_prefix_cache: RedisCache,
        with_prefix_cache: RedisCache,
    ):
        if isinstance(key_prefix_cache.client, ShardClient):
            pytest.skip("ShardClient doesn't support iter_keys")

        key_prefix_cache.set("a", "1")
        with_prefix_cache.set("b", "2")

        assert list(key_prefix_cache.iter_keys("*")) == ["a"]

    def test_keys(self, key_prefix_cache: RedisCache, with_prefix_cache: RedisCache):
        key_prefix_cache.set("a", "1")
        with_prefix_cache.set("b", "2")

        found = key_prefix_cache.keys("*")

        assert "a" in found
        assert "b" not in found
for k in cache.client.get_client(write=False).keys("*")} == ( 145 | {"#1#foo-bc", "#1#foo-bb"} 146 | ) 147 | -------------------------------------------------------------------------------- /django_redis/client/herd.py: -------------------------------------------------------------------------------- 1 | import random 2 | import socket 3 | import time 4 | from collections import OrderedDict 5 | 6 | from django.conf import settings 7 | from redis.exceptions import ConnectionError as RedisConnectionError 8 | from redis.exceptions import ResponseError 9 | from redis.exceptions import TimeoutError as RedisTimeoutError 10 | 11 | from django_redis.client.default import DEFAULT_TIMEOUT, DefaultClient 12 | from django_redis.exceptions import ConnectionInterrupted 13 | 14 | _main_exceptions = ( 15 | RedisConnectionError, 16 | RedisTimeoutError, 17 | ResponseError, 18 | socket.timeout, 19 | ) 20 | 21 | 22 | class Marker: 23 | """ 24 | Dummy class for use as 25 | marker for herded keys. 26 | """ 27 | 28 | pass 29 | 30 | 31 | def _is_expired(x, herd_timeout: int) -> bool: 32 | if x >= herd_timeout: 33 | return True 34 | val = x + random.randint(1, herd_timeout) 35 | 36 | return val >= herd_timeout 37 | 38 | 39 | class HerdClient(DefaultClient): 40 | def __init__(self, *args, **kwargs): 41 | self._marker = Marker() 42 | self._herd_timeout = getattr(settings, "CACHE_HERD_TIMEOUT", 60) 43 | super().__init__(*args, **kwargs) 44 | 45 | def _pack(self, value, timeout): 46 | herd_timeout = (timeout or self._backend.default_timeout) + int(time.time()) 47 | return self._marker, value, herd_timeout 48 | 49 | def _unpack(self, value): 50 | try: 51 | marker, unpacked, herd_timeout = value 52 | except (ValueError, TypeError): 53 | return value, False 54 | 55 | if not isinstance(marker, Marker): 56 | return value, False 57 | 58 | now = int(time.time()) 59 | if herd_timeout < now: 60 | x = now - herd_timeout 61 | return unpacked, _is_expired(x, self._herd_timeout) 62 | 63 | return unpacked, 
False 64 | 65 | def set( 66 | self, 67 | key, 68 | value, 69 | timeout=DEFAULT_TIMEOUT, 70 | version=None, 71 | client=None, 72 | nx=False, 73 | xx=False, 74 | ): 75 | if timeout is DEFAULT_TIMEOUT: 76 | timeout = self._backend.default_timeout 77 | 78 | if timeout is None or timeout <= 0: 79 | return super().set( 80 | key, 81 | value, 82 | timeout=timeout, 83 | version=version, 84 | client=client, 85 | nx=nx, 86 | xx=xx, 87 | ) 88 | 89 | packed = self._pack(value, timeout) 90 | real_timeout = timeout + self._herd_timeout 91 | 92 | return super().set( 93 | key, 94 | packed, 95 | timeout=real_timeout, 96 | version=version, 97 | client=client, 98 | nx=nx, 99 | ) 100 | 101 | def get(self, key, default=None, version=None, client=None): 102 | packed = super().get(key, default=default, version=version, client=client) 103 | val, refresh = self._unpack(packed) 104 | 105 | if refresh: 106 | return default 107 | 108 | return val 109 | 110 | def get_many(self, keys, version=None, client=None): 111 | if client is None: 112 | client = self.get_client(write=False) 113 | 114 | if not keys: 115 | return {} 116 | 117 | recovered_data = OrderedDict() 118 | 119 | new_keys = [self.make_key(key, version=version) for key in keys] 120 | map_keys = dict(zip(new_keys, keys)) 121 | 122 | try: 123 | results = client.mget(*new_keys) 124 | except _main_exceptions as e: 125 | raise ConnectionInterrupted(connection=client) from e 126 | 127 | for key, value in zip(new_keys, results): 128 | if value is None: 129 | continue 130 | 131 | val, refresh = self._unpack(self.decode(value)) 132 | recovered_data[map_keys[key]] = None if refresh else val 133 | 134 | return recovered_data 135 | 136 | def set_many( 137 | self, 138 | data, 139 | timeout=DEFAULT_TIMEOUT, 140 | version=None, 141 | client=None, 142 | herd=True, 143 | ): 144 | """ 145 | Set a bunch of values in the cache at once from a dict of key/value 146 | pairs. This is much more efficient than calling set() multiple times. 
147 | 148 | If timeout is given, that timeout will be used for the key; otherwise 149 | the default cache timeout will be used. 150 | """ 151 | if client is None: 152 | client = self.get_client(write=True) 153 | 154 | set_function = self.set if herd else super().set 155 | 156 | try: 157 | pipeline = client.pipeline() 158 | for key, value in data.items(): 159 | set_function(key, value, timeout, version=version, client=pipeline) 160 | pipeline.execute() 161 | except _main_exceptions as e: 162 | raise ConnectionInterrupted(connection=client) from e 163 | 164 | def incr(self, *args, **kwargs): 165 | raise NotImplementedError 166 | 167 | def decr(self, *args, **kwargs): 168 | raise NotImplementedError 169 | 170 | def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, client=None): 171 | if client is None: 172 | client = self.get_client(write=True) 173 | 174 | value = self.get(key, version=version, client=client) 175 | if value is None: 176 | return False 177 | 178 | self.set(key, value, timeout=timeout, version=version, client=client) 179 | return True 180 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | test: 7 | name: > 8 | Test Python ${{ matrix.python-version }}, 9 | Django ${{ matrix.django-version }}, 10 | Redis.py ${{ matrix.redis-version }} 11 | 12 | runs-on: ubuntu-latest 13 | 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | python-version: 18 | - '3.10' 19 | - '3.11' 20 | - '3.12' 21 | django-version: 22 | - '4.2' 23 | - '5.1' 24 | - '5.2' 25 | redis-version: 26 | - 'latest' 27 | 28 | # Only test pre-release dependencies for the latest Python. 
29 | include: 30 | # Django 4.2 and python 3.9 with latest redis 31 | - django-version: '4.2' 32 | redis-version: 'latest' 33 | python-version: '3.9' 34 | 35 | # latest Django with latest redis 36 | - django-version: '5.2' 37 | redis-version: 'latest' 38 | python-version: '3.13' 39 | 40 | # latest Django with pre-release redis 41 | - django-version: '5.2' 42 | redis-version: 'master' 43 | python-version: '3.13' 44 | 45 | # pre-release Django with latest redis 46 | - django-version: 'main' 47 | redis-version: 'latest' 48 | python-version: '3.13' 49 | 50 | # pre-release Django and pre-release redis 51 | - django-version: 'main' 52 | redis-version: 'master' 53 | python-version: '3.13' 54 | 55 | steps: 56 | - uses: actions/checkout@v5 57 | 58 | - name: Set up Python ${{ matrix.python-version }} 59 | uses: actions/setup-python@v6 60 | with: 61 | python-version: ${{ matrix.python-version }} 62 | 63 | - name: Get pip cache dir 64 | id: pip-cache 65 | run: | 66 | echo "::set-output name=dir::$(pip cache dir)" 67 | 68 | - name: Cache 69 | uses: actions/cache@v4 70 | with: 71 | path: ${{ steps.pip-cache.outputs.dir }} 72 | key: pip-test-python-${{ matrix.python-version }}-django-${{ matrix.django-version }}-redis-${{ matrix.redis-version }}-${{ hashFiles('**/setup.*') }} 73 | restore-keys: | 74 | pip-test-python-${{ matrix.python-version }}-django-${{ matrix.django-version }}-redis-${{ matrix.redis-version }} 75 | pip-test-python-${{ matrix.python-version }} 76 | pip-test- 77 | 78 | - name: Install dependencies 79 | run: | 80 | python -m pip install --upgrade pip 81 | python -m pip install --upgrade tox-uv tox-gh-actions 82 | 83 | - name: Docker compose up 84 | run: docker compose -f docker/docker-compose.yml up -d --wait 85 | 86 | - name: Tox tests 87 | run: tox 88 | env: 89 | DJANGO: ${{ matrix.django-version }} 90 | REDIS: ${{ matrix.redis-version }} 91 | 92 | - name: Upload coverage 93 | uses: codecov/codecov-action@v5 94 | with: 95 | env_vars: DJANGO,REDIS,PYTHON 96 | 
flags: tests 97 | env: 98 | DJANGO: ${{ matrix.django-version }} 99 | REDIS: ${{ matrix.redis-version }} 100 | PYTHON: ${{ matrix.python-version }} 101 | 102 | lint: 103 | name: Lint (${{ matrix.tool }}) 104 | runs-on: ubuntu-latest 105 | 106 | strategy: 107 | fail-fast: false 108 | matrix: 109 | tool: 110 | - 'pre-commit' 111 | - 'mypy' 112 | 113 | steps: 114 | - uses: actions/checkout@v5 115 | 116 | - name: Set up Python 117 | uses: actions/setup-python@v6 118 | with: 119 | python-version: '3.x' 120 | 121 | - name: Get pip cache dir 122 | id: pip-cache 123 | run: | 124 | echo "::set-output name=dir::$(pip cache dir)" 125 | 126 | - name: Cache 127 | uses: actions/cache@v4 128 | with: 129 | path: ${{ steps.pip-cache.outputs.dir }} 130 | key: pip-lint-${{ hashFiles('**/setup.*') }} 131 | restore-keys: | 132 | pip-lint- 133 | 134 | - name: Install dependencies 135 | run: | 136 | python -m pip install --upgrade pip 137 | python -m pip install --upgrade "tox<4" 138 | 139 | - name: Run ${{ matrix.tool }} 140 | run: tox -e ${{ matrix.tool }} 141 | 142 | - name: Upload coverage 143 | if: ${{ matrix.tool == 'mypy' }} 144 | uses: codecov/codecov-action@v5 145 | with: 146 | flags: mypy 147 | 148 | check-changelog: 149 | name: Check for news fragments in 'changelog.d/' 150 | runs-on: ubuntu-latest 151 | 152 | steps: 153 | - uses: actions/checkout@v5 154 | with: 155 | fetch-depth: 0 156 | 157 | - name: Set up Python 158 | uses: actions/setup-python@v6 159 | with: 160 | python-version: '3.x' 161 | 162 | - name: Get pip cache dir 163 | id: pip-cache 164 | run: | 165 | echo "::set-output name=dir::$(pip cache dir)" 166 | 167 | - name: Cache 168 | uses: actions/cache@v4 169 | with: 170 | path: ${{ steps.pip-cache.outputs.dir }} 171 | key: pip-check-changelog-${{ hashFiles('**/setup.*') }} 172 | restore-keys: | 173 | pip-check-changelog- 174 | 175 | - name: Install dependencies 176 | run: | 177 | python -m pip install --upgrade towncrier 178 | 179 | - name: Run towncrier check 180 
| run: | 181 | if ! towncrier check; then 182 | echo '' 183 | echo "Please add a description of your changes to 'changelog.d/{issue or PR number}.{feature,bugfix,misc,doc,removal}'" 184 | exit 1 185 | fi 186 | -------------------------------------------------------------------------------- /.ruff.toml: -------------------------------------------------------------------------------- 1 | fix = true 2 | target-version = "py39" 3 | 4 | [lint] 5 | # https://beta.ruff.rs/docs/rules/ 6 | select = [ 7 | # https://docs.astral.sh/ruff/rules/#flake8-builtins-a 8 | # https://github.com/gforcada/flake8-builtins 9 | "A", 10 | 11 | # https://docs.astral.sh/ruff/rules/#flake8-annotations-ann 12 | # https://github.com/sco1/flake8-annotations 13 | # "ANN", 14 | 15 | # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg 16 | # https://github.com/nhoad/flake8-unused-arguments 17 | # TODO: "ARG", 18 | 19 | # https://docs.astral.sh/ruff/rules/#flake8-bugbear-b 20 | # https://github.com/PyCQA/flake8-bugbear 21 | "B", 22 | 23 | # https://docs.astral.sh/ruff/rules/#flake8-blind-except-ble 24 | # https://github.com/elijahandrews/flake8-blind-except 25 | "BLE", 26 | 27 | # https://docs.astral.sh/ruff/rules/#flake8-commas-com 28 | # https://github.com/PyCQA/flake8-commas/ 29 | # About this warning: 30 | # > The following rule may cause conflicts when used with the formatter: `COM812` 31 | # Check https://github.com/astral-sh/ruff/issues/9216 32 | "COM", 33 | 34 | # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 35 | # https://github.com/adamchainz/flake8-comprehensions 36 | "C4", 37 | 38 | # rules from mccabe 39 | "C90", 40 | 41 | # https://docs.astral.sh/ruff/rules/#flake8-django-dj 42 | # https://github.com/rocioar/flake8-django 43 | "DJ", 44 | 45 | # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz 46 | # https://github.com/pjknkda/flake8-datetimez 47 | # TODO: "DTZ", 48 | 49 | # https://docs.astral.sh/ruff/rules/#error-e 50 | # 
https://github.com/PyCQA/pycodestyle 51 | "E", 52 | 53 | # https://docs.astral.sh/ruff/rules/#flake8-errmsg-em 54 | # https://github.com/henryiii/flake8-errmsg 55 | "EM", 56 | 57 | # https://docs.astral.sh/ruff/rules/#flake8-executable-exe 58 | # https://github.com/xuhdev/flake8-executable 59 | "EXE", 60 | 61 | # https://docs.astral.sh/ruff/rules/#pyflakes-f 62 | # https://github.com/PyCQA/pyflakes 63 | "F", 64 | 65 | # https://docs.astral.sh/ruff/rules/#flake8-boolean-trap-fbt 66 | # https://github.com/pwoolvett/flake8_boolean_trap 67 | # TODO: "FBT", 68 | 69 | # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g 70 | # https://github.com/globality-corp/flake8-logging-format 71 | "G", 72 | 73 | # https://docs.astral.sh/ruff/rules/#isort-i 74 | # https://pycqa.github.io/isort/ 75 | "I", 76 | 77 | # https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn 78 | # https://github.com/joaopalmeiro/flake8-import-conventions 79 | "ICN", 80 | 81 | # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc 82 | # https://github.com/flake8-implicit-str-concat/flake8-implicit-str-concat 83 | "ISC", 84 | 85 | # https://docs.astral.sh/ruff/rules/#flake8-no-pep420-inp 86 | # https://github.com/adamchainz/flake8-no-pep420 87 | "INP", 88 | 89 | # https://docs.astral.sh/ruff/rules/#flake8-gettext-int 90 | # https://github.com/cielavenir/flake8_gettext 91 | "INT", 92 | 93 | # https://docs.astral.sh/ruff/rules/#flake8-pie-pie 94 | # https://github.com/sbdchd/flake8-pie 95 | # TODO: "PIE", 96 | 97 | # https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt 98 | # https://github.com/m-burst/flake8-pytest-style 99 | # TODO: "PT", 100 | 101 | # https://docs.astral.sh/ruff/rules/#flake8-pyi-pyi 102 | # https://github.com/PyCQA/flake8-pyi 103 | "PYI", 104 | 105 | # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth 106 | # https://gitlab.com/RoPP/flake8-use-pathlib 107 | "PTH", 108 | 109 | # https://docs.astral.sh/ruff/rules/#flake8-return-ret 110 | # 
https://github.com/afonasev/flake8-return 111 | "RET", 112 | 113 | # https://docs.astral.sh/ruff/rules/#flake8-raise-rse 114 | # https://github.com/jdufresne/flake8-raise 115 | "RSE", 116 | 117 | # https://docs.astral.sh/ruff/rules/unused-noqa/ 118 | "RUF100", 119 | 120 | # https://docs.astral.sh/ruff/rules/#flake8-bandit-s 121 | # https://github.com/tylerwince/flake8-bandit 122 | "S", 123 | 124 | # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim 125 | # https://github.com/MartinThoma/flake8-simplify 126 | "SIM", 127 | 128 | # https://docs.astral.sh/ruff/rules/#flake8-self-slf 129 | # https://github.com/korijn/flake8-self 130 | # TODO: "SLF", 131 | 132 | # rules from flake8-type-checking 133 | # TODO: 134 | # Replace with the new rules 135 | # https://github.com/astral-sh/ruff/issues/9573 136 | "TCH", 137 | 138 | # https://docs.astral.sh/ruff/rules/#flake8-tidy-imports-tid 139 | # https://github.com/adamchainz/flake8-tidy-imports 140 | "TID", 141 | 142 | # https://docs.astral.sh/ruff/rules/#flake8-debugger-t10 143 | # https://github.com/jbkahn/flake8-debugger 144 | "T10", 145 | 146 | # https://docs.astral.sh/ruff/rules/#flake8-print-t20 147 | # https://github.com/jbkahn/flake8-print 148 | "T20", 149 | 150 | # https://docs.astral.sh/ruff/rules/#pyupgrade-up 151 | # https://github.com/asottile/pyupgrade 152 | "UP", 153 | 154 | # https://docs.astral.sh/ruff/rules/#warning-w 155 | # https://github.com/PyCQA/pycodestyle 156 | "W", 157 | 158 | # https://docs.astral.sh/ruff/rules/#flake8-2020-ytt 159 | # https://github.com/asottile-archive/flake8-2020 160 | "YTT", 161 | ] 162 | 163 | ignore = [ 164 | "ANN101", # ignore missing type annotation in self parameter 165 | "S311", # ignore Standard pseudo-random generators because they are not used for cryptographic purposes 166 | ] 167 | 168 | [lint.flake8-tidy-imports] 169 | ## Disallow all relative imports. 
@pytest.fixture()
def cache_client(cache: RedisCache) -> Iterable[DefaultClient]:
    """Yield the default cache's low-level client with a probe key set,
    removing the probe key again once the test has finished."""
    low_level = cache.client
    low_level.set("TestClientClose", 0)
    yield low_level
    low_level.delete("TestClientClose")
cache_client.close() 42 | assert mock.called 43 | 44 | def test_close_disconnect_settings_cache( 45 | self, 46 | cache_client: DefaultClient, 47 | mocker: MockerFixture, 48 | settings: SettingsWrapper, 49 | ): 50 | caches = settings.CACHES 51 | caches[DEFAULT_CACHE_ALIAS]["OPTIONS"]["CLOSE_CONNECTION"] = True 52 | with override_settings(CACHES=caches): 53 | cache_client.set("TestClientClose", 0) 54 | mock = mocker.patch.object(cache_client.connection_factory, "disconnect") 55 | cache_client.close() 56 | assert mock.called 57 | 58 | def test_close_disconnect_client_options( 59 | self, 60 | cache_client: DefaultClient, 61 | mocker: MockerFixture, 62 | ): 63 | cache_client._options["CLOSE_CONNECTION"] = True 64 | mock = mocker.patch.object(cache_client.connection_factory, "disconnect") 65 | cache_client.close() 66 | assert mock.called 67 | 68 | 69 | class TestDefaultClient: 70 | @patch("test_client.DefaultClient.get_client") 71 | @patch("test_client.DefaultClient.__init__", return_value=None) 72 | def test_delete_pattern_calls_get_client_given_no_client( 73 | self, 74 | init_mock, 75 | get_client_mock, 76 | ): 77 | client = DefaultClient() 78 | client._backend = Mock() 79 | client._backend.key_prefix = "" 80 | 81 | client.delete_pattern(pattern="foo*") 82 | get_client_mock.assert_called_once_with(write=True) 83 | 84 | @patch("test_client.DefaultClient.make_pattern") 85 | @patch("test_client.DefaultClient.get_client", return_value=Mock()) 86 | @patch("test_client.DefaultClient.__init__", return_value=None) 87 | def test_delete_pattern_calls_make_pattern( 88 | self, 89 | init_mock, 90 | get_client_mock, 91 | make_pattern_mock, 92 | ): 93 | client = DefaultClient() 94 | client._backend = Mock() 95 | client._backend.key_prefix = "" 96 | get_client_mock.return_value.scan_iter.return_value = [] 97 | 98 | client.delete_pattern(pattern="foo*") 99 | 100 | kwargs = {"version": None, "prefix": None} 101 | make_pattern_mock.assert_called_once_with("foo*", **kwargs) 102 | 103 | 
@patch("test_client.DefaultClient.make_pattern") 104 | @patch("test_client.DefaultClient.get_client", return_value=Mock()) 105 | @patch("test_client.DefaultClient.__init__", return_value=None) 106 | def test_delete_pattern_calls_scan_iter_with_count_if_itersize_given( 107 | self, 108 | init_mock, 109 | get_client_mock, 110 | make_pattern_mock, 111 | ): 112 | client = DefaultClient() 113 | client._backend = Mock() 114 | client._backend.key_prefix = "" 115 | get_client_mock.return_value.scan_iter.return_value = [] 116 | 117 | client.delete_pattern(pattern="foo*", itersize=90210) 118 | 119 | get_client_mock.return_value.scan_iter.assert_called_once_with( 120 | count=90210, 121 | match=make_pattern_mock.return_value, 122 | ) 123 | 124 | @patch("test_client.DefaultClient.make_pattern") 125 | @patch("test_client.DefaultClient.get_client", return_value=Mock()) 126 | @patch("test_client.DefaultClient.__init__", return_value=None) 127 | def test_delete_pattern_calls_pipeline_delete_and_execute( 128 | self, 129 | init_mock, 130 | get_client_mock, 131 | make_pattern_mock, 132 | ): 133 | client = DefaultClient() 134 | client._backend = Mock() 135 | client._backend.key_prefix = "" 136 | get_client_mock.return_value.scan_iter.return_value = [":1:foo", ":1:foo-a"] 137 | get_client_mock.return_value.pipeline.return_value = Mock() 138 | get_client_mock.return_value.pipeline.return_value.delete = Mock() 139 | get_client_mock.return_value.pipeline.return_value.execute = Mock() 140 | 141 | client.delete_pattern(pattern="foo*") 142 | 143 | assert get_client_mock.return_value.pipeline.return_value.delete.call_count == 2 144 | get_client_mock.return_value.pipeline.return_value.delete.assert_has_calls( 145 | [call(":1:foo"), call(":1:foo-a")], 146 | ) 147 | get_client_mock.return_value.pipeline.return_value.execute.assert_called_once() 148 | 149 | 150 | class TestShardClient: 151 | @patch("test_client.DefaultClient.make_pattern") 152 | @patch("test_client.ShardClient.__init__", 
return_value=None) 153 | def test_delete_pattern_calls_scan_iter_with_count_if_itersize_given( 154 | self, 155 | init_mock, 156 | make_pattern_mock, 157 | ): 158 | client = ShardClient() 159 | client._backend = Mock() 160 | client._backend.key_prefix = "" 161 | 162 | connection = Mock() 163 | connection.scan_iter.return_value = [] 164 | client._serverdict = {"test": connection} 165 | 166 | client.delete_pattern(pattern="foo*", itersize=10) 167 | 168 | connection.scan_iter.assert_called_once_with( 169 | count=10, 170 | match=make_pattern_mock.return_value, 171 | ) 172 | 173 | @patch("test_client.DefaultClient.make_pattern") 174 | @patch("test_client.ShardClient.__init__", return_value=None) 175 | def test_delete_pattern_calls_scan_iter(self, init_mock, make_pattern_mock): 176 | client = ShardClient() 177 | client._backend = Mock() 178 | client._backend.key_prefix = "" 179 | connection = Mock() 180 | connection.scan_iter.return_value = [] 181 | client._serverdict = {"test": connection} 182 | 183 | client.delete_pattern(pattern="foo*") 184 | 185 | connection.scan_iter.assert_called_once_with( 186 | match=make_pattern_mock.return_value, 187 | ) 188 | 189 | @patch("test_client.DefaultClient.make_pattern") 190 | @patch("test_client.ShardClient.__init__", return_value=None) 191 | def test_delete_pattern_calls_delete_for_given_keys( 192 | self, 193 | init_mock, 194 | make_pattern_mock, 195 | ): 196 | client = ShardClient() 197 | client._backend = Mock() 198 | client._backend.key_prefix = "" 199 | connection = Mock() 200 | connection.scan_iter.return_value = [Mock(), Mock()] 201 | connection.delete.return_value = 0 202 | client._serverdict = {"test": connection} 203 | 204 | client.delete_pattern(pattern="foo*") 205 | 206 | connection.delete.assert_called_once_with(*connection.scan_iter.return_value) 207 | -------------------------------------------------------------------------------- /django_redis/pool.py: 
-------------------------------------------------------------------------------- 1 | from urllib.parse import parse_qs, urlencode, urlparse, urlunparse 2 | 3 | from django.conf import settings 4 | from django.core.exceptions import ImproperlyConfigured 5 | from django.utils.module_loading import import_string 6 | from redis import Redis 7 | from redis.connection import ConnectionPool, DefaultParser, to_bool 8 | from redis.sentinel import Sentinel 9 | 10 | 11 | class ConnectionFactory: 12 | # Store connection pool by cache backend options. 13 | # 14 | # _pools is a process-global, as otherwise _pools is cleared every time 15 | # ConnectionFactory is instantiated, as Django creates new cache client 16 | # (DefaultClient) instance for every request. 17 | 18 | _pools: dict[str, ConnectionPool] = {} 19 | 20 | def __init__(self, options): 21 | pool_cls_path = options.get( 22 | "CONNECTION_POOL_CLASS", 23 | "redis.connection.ConnectionPool", 24 | ) 25 | self.pool_cls = import_string(pool_cls_path) 26 | self.pool_cls_kwargs = options.get("CONNECTION_POOL_KWARGS", {}) 27 | 28 | redis_client_cls_path = options.get("REDIS_CLIENT_CLASS", "redis.client.Redis") 29 | self.redis_client_cls = import_string(redis_client_cls_path) 30 | self.redis_client_cls_kwargs = options.get("REDIS_CLIENT_KWARGS", {}) 31 | 32 | self.options = options 33 | 34 | def make_connection_params(self, url): 35 | """ 36 | Given a main connection parameters, build a complete 37 | dict of connection parameters. 
38 | """ 39 | 40 | kwargs = { 41 | "url": url, 42 | "parser_class": self.get_parser_cls(), 43 | } 44 | 45 | password = self.options.get("PASSWORD", None) 46 | if password: 47 | kwargs["password"] = password 48 | 49 | socket_timeout = self.options.get("SOCKET_TIMEOUT", None) 50 | if socket_timeout: 51 | if not isinstance(socket_timeout, (int, float)): 52 | error_message = "Socket timeout should be float or integer" 53 | raise ImproperlyConfigured(error_message) 54 | kwargs["socket_timeout"] = socket_timeout 55 | 56 | socket_connect_timeout = self.options.get("SOCKET_CONNECT_TIMEOUT", None) 57 | if socket_connect_timeout: 58 | if not isinstance(socket_connect_timeout, (int, float)): 59 | error_message = "Socket connect timeout should be float or integer" 60 | raise ImproperlyConfigured(error_message) 61 | kwargs["socket_connect_timeout"] = socket_connect_timeout 62 | 63 | return kwargs 64 | 65 | def connect(self, url: str) -> Redis: 66 | """ 67 | Given a basic connection parameters, 68 | return a new connection. 69 | """ 70 | params = self.make_connection_params(url) 71 | return self.get_connection(params) 72 | 73 | def disconnect(self, connection: Redis) -> None: 74 | """ 75 | Given a not null client connection it disconnect from the Redis server. 76 | 77 | The default implementation uses a pool to hold connections. 78 | """ 79 | connection.connection_pool.disconnect() 80 | 81 | def get_connection(self, params): 82 | """ 83 | Given a now preformatted params, return a 84 | new connection. 85 | 86 | The default implementation uses a cached pools 87 | for create new connection. 
88 | """ 89 | pool = self.get_or_create_connection_pool(params) 90 | return self.redis_client_cls( 91 | connection_pool=pool, 92 | **self.redis_client_cls_kwargs, 93 | ) 94 | 95 | def get_parser_cls(self): 96 | cls = self.options.get("PARSER_CLASS", None) 97 | if cls is None: 98 | return DefaultParser 99 | return import_string(cls) 100 | 101 | def get_or_create_connection_pool(self, params): 102 | """ 103 | Given a connection parameters and return a new 104 | or cached connection pool for them. 105 | 106 | Reimplement this method if you want distinct 107 | connection pool instance caching behavior. 108 | """ 109 | key = params["url"] 110 | if key not in self._pools: 111 | self._pools[key] = self.get_connection_pool(params) 112 | return self._pools[key] 113 | 114 | def get_connection_pool(self, params): 115 | """ 116 | Given a connection parameters, return a new 117 | connection pool for them. 118 | 119 | Overwrite this method if you want a custom 120 | behavior on creating connection pool. 121 | """ 122 | cp_params = dict(params) 123 | cp_params.update(self.pool_cls_kwargs) 124 | pool = self.pool_cls.from_url(**cp_params) 125 | 126 | if pool.connection_kwargs.get("password", None) is None: 127 | pool.connection_kwargs["password"] = params.get("password", None) 128 | pool.reset() 129 | 130 | return pool 131 | 132 | 133 | class SentinelConnectionFactory(ConnectionFactory): 134 | def __init__(self, options): 135 | # allow overriding the default SentinelConnectionPool class 136 | options.setdefault( 137 | "CONNECTION_POOL_CLASS", 138 | "redis.sentinel.SentinelConnectionPool", 139 | ) 140 | super().__init__(options) 141 | 142 | sentinels = options.get("SENTINELS") 143 | if not sentinels: 144 | error_message = "SENTINELS must be provided as a list of (host, port)." 
145 | raise ImproperlyConfigured(error_message) 146 | 147 | # provide the connection pool kwargs to the sentinel in case it 148 | # needs to use the socket options for the sentinels themselves 149 | connection_kwargs = self.make_connection_params(None) 150 | connection_kwargs.pop("url") 151 | connection_kwargs.update(self.pool_cls_kwargs) 152 | self._sentinel = Sentinel( 153 | sentinels, 154 | sentinel_kwargs=options.get("SENTINEL_KWARGS"), 155 | **connection_kwargs, 156 | ) 157 | 158 | def get_connection_pool(self, params): 159 | """ 160 | Given a connection parameters, return a new sentinel connection pool 161 | for them. 162 | """ 163 | url = urlparse(params["url"]) 164 | 165 | # explicitly set service_name and sentinel_manager for the 166 | # SentinelConnectionPool constructor since will be called by from_url 167 | cp_params = dict(params) 168 | # convert "is_master" to a boolean if set on the URL, otherwise if not 169 | # provided it defaults to True. 170 | query_params = parse_qs(url.query) 171 | is_master = query_params.get("is_master") 172 | if is_master: 173 | cp_params["is_master"] = to_bool(is_master[0]) 174 | # then remove the "is_master" query string from the URL 175 | # so it doesn't interfere with the SentinelConnectionPool constructor 176 | if "is_master" in query_params: 177 | del query_params["is_master"] 178 | new_query = urlencode(query_params, doseq=True) 179 | 180 | new_url = urlunparse( 181 | (url.scheme, url.netloc, url.path, url.params, new_query, url.fragment), 182 | ) 183 | 184 | cp_params.update( 185 | service_name=url.hostname, 186 | sentinel_manager=self._sentinel, 187 | url=new_url, 188 | ) 189 | 190 | return super().get_connection_pool(cp_params) 191 | 192 | 193 | def get_connection_factory(path=None, options=None): 194 | if path is None: 195 | path = getattr( 196 | settings, 197 | "DJANGO_REDIS_CONNECTION_FACTORY", 198 | "django_redis.pool.ConnectionFactory", 199 | ) 200 | opt_conn_factory = options.get("CONNECTION_FACTORY") 201 | 
if opt_conn_factory: 202 | path = opt_conn_factory 203 | 204 | cls = import_string(path) 205 | return cls(options or {}) 206 | -------------------------------------------------------------------------------- /tests/test_backend_sorted_sets.py: -------------------------------------------------------------------------------- 1 | from django_redis.cache import RedisCache 2 | 3 | 4 | class TestSortedSetOperations: 5 | """Tests for sorted set (ZSET) operations.""" 6 | 7 | def test_zadd_basic(self, cache: RedisCache): 8 | """Test adding members to sorted set.""" 9 | result = cache.zadd("scores", {"player1": 100.0, "player2": 200.0}) 10 | assert result == 2 11 | assert cache.zcard("scores") == 2 12 | 13 | def test_zadd_with_nx(self, cache: RedisCache): 14 | """Test zadd with nx flag (only add new).""" 15 | cache.zadd("scores", {"alice": 10.0}) 16 | result = cache.zadd("scores", {"alice": 20.0}, nx=True) 17 | assert result == 0 18 | assert cache.zscore("scores", "alice") == 10.0 19 | 20 | def test_zadd_with_xx(self, cache: RedisCache): 21 | """Test zadd with xx flag (only update existing).""" 22 | cache.zadd("scores", {"bob": 15.0}) 23 | result = cache.zadd("scores", {"bob": 25.0}, xx=True) 24 | assert result == 0 # No new members added 25 | assert cache.zscore("scores", "bob") == 25.0 26 | result = cache.zadd("scores", {"charlie": 30.0}, xx=True) 27 | assert result == 0 28 | assert cache.zscore("scores", "charlie") is None 29 | 30 | def test_zadd_with_ch(self, cache: RedisCache): 31 | """Test zadd with ch flag (return changed count).""" 32 | cache.zadd("scores", {"player1": 100.0}) 33 | result = cache.zadd("scores", {"player1": 150.0, "player2": 200.0}, ch=True) 34 | assert result == 2 # 1 changed + 1 added 35 | 36 | def test_zcard(self, cache: RedisCache): 37 | """Test getting sorted set cardinality.""" 38 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 39 | assert cache.zcard("scores") == 3 40 | assert cache.zcard("nonexistent") == 0 41 | 42 | def 
test_zcount(self, cache: RedisCache): 43 | """Test counting members in score range.""" 44 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0, "d": 4.0, "e": 5.0}) 45 | assert cache.zcount("scores", 2.0, 4.0) == 3 # b, c, d 46 | assert cache.zcount("scores", "-inf", "+inf") == 5 47 | assert cache.zcount("scores", 10.0, 20.0) == 0 48 | 49 | def test_zincrby(self, cache: RedisCache): 50 | """Test incrementing member score.""" 51 | cache.zadd("scores", {"player1": 100.0}) 52 | new_score = cache.zincrby("scores", 50.0, "player1") 53 | assert new_score == 150.0 54 | assert cache.zscore("scores", "player1") == 150.0 55 | new_score = cache.zincrby("scores", 25.0, "player2") 56 | assert new_score == 25.0 57 | 58 | def test_zpopmax(self, cache: RedisCache): 59 | """Test popping highest scored members.""" 60 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 61 | result = cache.zpopmax("scores") 62 | assert result == ("c", 3.0) 63 | assert cache.zcard("scores") == 2 64 | cache.zadd("scores", {"d": 4.0, "e": 5.0}) 65 | result = cache.zpopmax("scores", count=2) 66 | assert len(result) == 2 67 | assert result[0][0] == "e" and result[0][1] == 5.0 68 | assert result[1][0] == "d" and result[1][1] == 4.0 69 | 70 | def test_zpopmin(self, cache: RedisCache): 71 | """Test popping lowest scored members.""" 72 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 73 | result = cache.zpopmin("scores") 74 | assert result == ("a", 1.0) 75 | assert cache.zcard("scores") == 2 76 | cache.zadd("scores", {"d": 0.5, "e": 0.1}) 77 | result = cache.zpopmin("scores", count=2) 78 | assert len(result) == 2 79 | assert result[0][0] == "e" and result[0][1] == 0.1 80 | assert result[1][0] == "d" and result[1][1] == 0.5 81 | 82 | def test_zrange_basic(self, cache: RedisCache): 83 | """Test getting range of members by index.""" 84 | cache.zadd("scores", {"alice": 10.0, "bob": 20.0, "charlie": 15.0}) 85 | result = cache.zrange("scores", 0, -1) 86 | assert result == ["alice", "charlie", "bob"] 87 | 
result = cache.zrange("scores", 0, 1) 88 | assert result == ["alice", "charlie"] 89 | 90 | def test_zrange_withscores(self, cache: RedisCache): 91 | """Test zrange with scores.""" 92 | cache.zadd("scores", {"alice": 10.5, "bob": 20.0, "charlie": 15.5}) 93 | result = cache.zrange("scores", 0, -1, withscores=True) 94 | assert result == [("alice", 10.5), ("charlie", 15.5), ("bob", 20.0)] 95 | 96 | def test_zrange_desc(self, cache: RedisCache): 97 | """Test zrange in descending order.""" 98 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 99 | result = cache.zrange("scores", 0, -1, desc=True) 100 | assert result == ["c", "b", "a"] 101 | 102 | def test_zrangebyscore(self, cache: RedisCache): 103 | """Test getting members by score range.""" 104 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0, "d": 4.0, "e": 5.0}) 105 | result = cache.zrangebyscore("scores", 2.0, 4.0) 106 | assert result == ["b", "c", "d"] 107 | result = cache.zrangebyscore("scores", "-inf", 2.0) 108 | assert result == ["a", "b"] 109 | 110 | def test_zrangebyscore_withscores(self, cache: RedisCache): 111 | """Test zrangebyscore with scores.""" 112 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 113 | result = cache.zrangebyscore("scores", 1.0, 2.0, withscores=True) 114 | assert result == [("a", 1.0), ("b", 2.0)] 115 | 116 | def test_zrangebyscore_pagination(self, cache: RedisCache): 117 | """Test zrangebyscore with pagination.""" 118 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0, "d": 4.0, "e": 5.0}) 119 | result = cache.zrangebyscore("scores", "-inf", "+inf", start=1, num=2) 120 | assert len(result) == 2 121 | assert result == ["b", "c"] 122 | 123 | def test_zrank(self, cache: RedisCache): 124 | """Test getting member rank.""" 125 | cache.zadd("scores", {"alice": 10.0, "bob": 20.0, "charlie": 15.0}) 126 | assert cache.zrank("scores", "alice") == 0 # Lowest score 127 | assert cache.zrank("scores", "charlie") == 1 128 | assert cache.zrank("scores", "bob") == 2 129 | assert 
cache.zrank("scores", "nonexistent") is None 130 | 131 | def test_zrem(self, cache: RedisCache): 132 | """Test removing members from sorted set.""" 133 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 134 | result = cache.zrem("scores", "b") 135 | assert result == 1 136 | assert cache.zcard("scores") == 2 137 | result = cache.zrem("scores", "a", "c") 138 | assert result == 2 139 | assert cache.zcard("scores") == 0 140 | 141 | def test_zremrangebyscore(self, cache: RedisCache): 142 | """Test removing members by score range.""" 143 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0, "d": 4.0, "e": 5.0}) 144 | result = cache.zremrangebyscore("scores", 2.0, 4.0) 145 | assert result == 3 # b, c, d removed 146 | assert cache.zcard("scores") == 2 147 | assert cache.zrange("scores", 0, -1) == ["a", "e"] 148 | 149 | def test_zrevrange(self, cache: RedisCache): 150 | """Test getting reverse range (highest to lowest).""" 151 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 152 | result = cache.zrevrange("scores", 0, -1) 153 | assert result == ["c", "b", "a"] 154 | 155 | def test_zrevrange_withscores(self, cache: RedisCache): 156 | """Test zrevrange with scores.""" 157 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0}) 158 | result = cache.zrevrange("scores", 0, -1, withscores=True) 159 | assert result == [("c", 3.0), ("b", 2.0), ("a", 1.0)] 160 | 161 | def test_zrevrangebyscore(self, cache: RedisCache): 162 | """Test getting reverse range by score.""" 163 | cache.zadd("scores", {"a": 1.0, "b": 2.0, "c": 3.0, "d": 4.0, "e": 5.0}) 164 | result = cache.zrevrangebyscore("scores", 4.0, 2.0) 165 | assert result == ["d", "c", "b"] 166 | 167 | def test_zscore(self, cache: RedisCache): 168 | """Test getting member score.""" 169 | cache.zadd("scores", {"alice": 42.5, "bob": 100.0}) 170 | assert cache.zscore("scores", "alice") == 42.5 171 | assert cache.zscore("scores", "bob") == 100.0 172 | assert cache.zscore("scores", "nonexistent") is None 173 | 174 | def 
test_sorted_set_serialization(self, cache: RedisCache): 175 | """Test that complex objects serialize correctly as members.""" 176 | cache.zadd("complex", {("tuple", "key"): 1.0, "string": 2.0}) 177 | result = cache.zrange("complex", 0, -1) 178 | assert ("tuple", "key") in result or ["tuple", "key"] in result 179 | assert "string" in result 180 | 181 | def test_sorted_set_version_support(self, cache: RedisCache): 182 | """Test version parameter works correctly.""" 183 | cache.zadd("data", {"v1": 1.0}, version=1) 184 | cache.zadd("data", {"v2": 2.0}, version=2) 185 | 186 | assert cache.zcard("data", version=1) == 1 187 | assert cache.zcard("data", version=2) == 1 188 | assert cache.zrange("data", 0, -1, version=1) == ["v1"] 189 | assert cache.zrange("data", 0, -1, version=2) == ["v2"] 190 | 191 | def test_sorted_set_float_scores(self, cache: RedisCache): 192 | """Test that float scores work correctly.""" 193 | cache.zadd("precise", {"a": 1.1, "b": 1.2, "c": 1.15}) 194 | result = cache.zrange("precise", 0, -1, withscores=True) 195 | assert result[0] == ("a", 1.1) 196 | assert result[1] == ("c", 1.15) 197 | assert result[2] == ("b", 1.2) 198 | 199 | def test_sorted_set_negative_scores(self, cache: RedisCache): 200 | """Test that negative scores work correctly.""" 201 | cache.zadd("temps", {"freezing": -10.0, "cold": 0.0, "warm": 20.0}) 202 | result = cache.zrange("temps", 0, -1) 203 | assert result == ["freezing", "cold", "warm"] 204 | 205 | def test_zpopmin_empty_set(self, cache: RedisCache): 206 | """Test zpopmin on empty sorted set.""" 207 | result = cache.zpopmin("nonexistent") 208 | assert result is None 209 | result = cache.zpopmin("nonexistent", count=5) 210 | assert result == [] 211 | 212 | def test_zpopmax_empty_set(self, cache: RedisCache): 213 | """Test zpopmax on empty sorted set.""" 214 | result = cache.zpopmax("nonexistent") 215 | assert result is None 216 | result = cache.zpopmax("nonexistent", count=5) 217 | assert result == [] 218 | 
-------------------------------------------------------------------------------- /django_redis/client/mixins/sorted_sets.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Optional, Union 2 | 3 | from redis import Redis 4 | from redis.typing import KeyT 5 | 6 | from django_redis.client.mixins.protocols import ClientProtocol 7 | 8 | 9 | class SortedSetMixin(ClientProtocol): 10 | """Mixin providing Redis sorted set (ZSET) operations.""" 11 | 12 | def zadd( 13 | self, 14 | name: KeyT, 15 | mapping: dict[Any, float], 16 | nx: bool = False, 17 | xx: bool = False, 18 | ch: bool = False, 19 | incr: bool = False, 20 | gt: bool = False, 21 | lt: bool = False, 22 | version: Optional[int] = None, 23 | client: Optional[Redis] = None, 24 | ) -> int: 25 | """Add members with scores to sorted set.""" 26 | if client is None: 27 | client = self.get_client(write=True) 28 | 29 | name = self.make_key(name, version=version) 30 | # Encode members but NOT scores (scores must remain as floats) 31 | encoded_mapping = { 32 | self.encode(member): score for member, score in mapping.items() 33 | } 34 | 35 | return int( 36 | client.zadd( 37 | name, 38 | encoded_mapping, # type: ignore[arg-type] 39 | nx=nx, 40 | xx=xx, 41 | ch=ch, 42 | incr=incr, 43 | gt=gt, 44 | lt=lt, 45 | ), 46 | ) 47 | 48 | def zcard( 49 | self, 50 | name: KeyT, 51 | version: Optional[int] = None, 52 | client: Optional[Redis] = None, 53 | ) -> int: 54 | """Get the number of members in sorted set.""" 55 | if client is None: 56 | client = self.get_client(write=False) 57 | 58 | name = self.make_key(name, version=version) 59 | return int(client.zcard(name)) 60 | 61 | def zcount( 62 | self, 63 | name: KeyT, 64 | min: Union[float, str], 65 | max: Union[float, str], 66 | version: Optional[int] = None, 67 | client: Optional[Redis] = None, 68 | ) -> int: 69 | """Count members in sorted set with scores between min and max.""" 70 | if client is None: 71 | client = 
self.get_client(write=False) 72 | 73 | name = self.make_key(name, version=version) 74 | return int(client.zcount(name, min, max)) 75 | 76 | def zincrby( 77 | self, 78 | name: KeyT, 79 | amount: float, 80 | value: Any, 81 | version: Optional[int] = None, 82 | client: Optional[Redis] = None, 83 | ) -> float: 84 | """Increment the score of member in sorted set by amount.""" 85 | if client is None: 86 | client = self.get_client(write=True) 87 | 88 | name = self.make_key(name, version=version) 89 | value = self.encode(value) 90 | return float(client.zincrby(name, amount, value)) 91 | 92 | def zpopmax( 93 | self, 94 | name: KeyT, 95 | count: Optional[int] = None, 96 | version: Optional[int] = None, 97 | client: Optional[Redis] = None, 98 | ) -> Union[list[tuple[Any, float]], tuple[Any, float], None]: 99 | """Remove and return members with highest scores.""" 100 | if client is None: 101 | client = self.get_client(write=True) 102 | 103 | name = self.make_key(name, version=version) 104 | result = client.zpopmax(name, count) 105 | 106 | if not result: 107 | return None if count is None else [] 108 | 109 | decoded = [(self.decode(member), score) for member, score in result] 110 | 111 | if count is None: 112 | return decoded[0] if decoded else None 113 | 114 | return decoded 115 | 116 | def zpopmin( 117 | self, 118 | name: KeyT, 119 | count: Optional[int] = None, 120 | version: Optional[int] = None, 121 | client: Optional[Redis] = None, 122 | ) -> Union[list[tuple[Any, float]], tuple[Any, float], None]: 123 | """Remove and return members with lowest scores.""" 124 | if client is None: 125 | client = self.get_client(write=True) 126 | 127 | name = self.make_key(name, version=version) 128 | result = client.zpopmin(name, count) 129 | 130 | if not result: 131 | return None if count is None else [] 132 | 133 | decoded = [(self.decode(member), score) for member, score in result] 134 | 135 | if count is None: 136 | return decoded[0] if decoded else None 137 | 138 | return decoded 139 
| 140 | def zrange( 141 | self, 142 | name: KeyT, 143 | start: int, 144 | end: int, 145 | desc: bool = False, 146 | withscores: bool = False, 147 | score_cast_func: type = float, 148 | version: Optional[int] = None, 149 | client: Optional[Redis] = None, 150 | ) -> Union[list[Any], list[tuple[Any, float]]]: 151 | """Return members in sorted set by index range.""" 152 | if client is None: 153 | client = self.get_client(write=False) 154 | 155 | name = self.make_key(name, version=version) 156 | result = client.zrange( 157 | name, 158 | start, 159 | end, 160 | desc=desc, 161 | withscores=withscores, 162 | score_cast_func=score_cast_func, 163 | ) 164 | 165 | if withscores: 166 | return [(self.decode(member), score) for member, score in result] 167 | 168 | return [self.decode(member) for member in result] 169 | 170 | def zrangebyscore( 171 | self, 172 | name: KeyT, 173 | min: Union[float, str], 174 | max: Union[float, str], 175 | start: Optional[int] = None, 176 | num: Optional[int] = None, 177 | withscores: bool = False, 178 | score_cast_func: type = float, 179 | version: Optional[int] = None, 180 | client: Optional[Redis] = None, 181 | ) -> Union[list[Any], list[tuple[Any, float]]]: 182 | """Return members in sorted set by score range.""" 183 | if client is None: 184 | client = self.get_client(write=False) 185 | 186 | name = self.make_key(name, version=version) 187 | result = client.zrangebyscore( 188 | name, 189 | min, 190 | max, 191 | start=start, 192 | num=num, 193 | withscores=withscores, 194 | score_cast_func=score_cast_func, 195 | ) 196 | 197 | if withscores: 198 | return [(self.decode(member), score) for member, score in result] 199 | 200 | return [self.decode(member) for member in result] 201 | 202 | def zrank( 203 | self, 204 | name: KeyT, 205 | value: Any, 206 | version: Optional[int] = None, 207 | client: Optional[Redis] = None, 208 | ) -> Optional[int]: 209 | """Get the rank (index) of member in sorted set, ordered low to high.""" 210 | if client is None: 
211 | client = self.get_client(write=False) 212 | 213 | name = self.make_key(name, version=version) 214 | value = self.encode(value) 215 | rank = client.zrank(name, value) 216 | 217 | return int(rank) if rank is not None else None 218 | 219 | def zrem( 220 | self, 221 | name: KeyT, 222 | *values: Any, 223 | version: Optional[int] = None, 224 | client: Optional[Redis] = None, 225 | ) -> int: 226 | """Remove members from sorted set.""" 227 | if client is None: 228 | client = self.get_client(write=True) 229 | 230 | name = self.make_key(name, version=version) 231 | encoded_values = [self.encode(value) for value in values] 232 | return int(client.zrem(name, *encoded_values)) 233 | 234 | def zremrangebyscore( 235 | self, 236 | name: KeyT, 237 | min: Union[float, str], 238 | max: Union[float, str], 239 | version: Optional[int] = None, 240 | client: Optional[Redis] = None, 241 | ) -> int: 242 | """Remove members from sorted set with scores between min and max.""" 243 | if client is None: 244 | client = self.get_client(write=True) 245 | 246 | name = self.make_key(name, version=version) 247 | return int(client.zremrangebyscore(name, min, max)) 248 | 249 | def zrevrange( 250 | self, 251 | name: KeyT, 252 | start: int, 253 | end: int, 254 | withscores: bool = False, 255 | score_cast_func: type = float, 256 | version: Optional[int] = None, 257 | client: Optional[Redis] = None, 258 | ) -> Union[list[Any], list[tuple[Any, float]]]: 259 | """Return members in sorted set by index range, ordered high to low.""" 260 | if client is None: 261 | client = self.get_client(write=False) 262 | 263 | name = self.make_key(name, version=version) 264 | result = client.zrevrange( 265 | name, 266 | start, 267 | end, 268 | withscores=withscores, 269 | score_cast_func=score_cast_func, 270 | ) 271 | 272 | if withscores: 273 | return [(self.decode(member), score) for member, score in result] 274 | 275 | return [self.decode(member) for member in result] 276 | 277 | def zrevrangebyscore( 278 | self, 279 
| name: KeyT, 280 | max: Union[float, str], 281 | min: Union[float, str], 282 | start: Optional[int] = None, 283 | num: Optional[int] = None, 284 | withscores: bool = False, 285 | score_cast_func: type = float, 286 | version: Optional[int] = None, 287 | client: Optional[Redis] = None, 288 | ) -> Union[list[Any], list[tuple[Any, float]]]: 289 | """Return members in sorted set by score range, ordered high to low.""" 290 | if client is None: 291 | client = self.get_client(write=False) 292 | 293 | name = self.make_key(name, version=version) 294 | result = client.zrevrangebyscore( 295 | name, 296 | max, 297 | min, 298 | start=start, 299 | num=num, 300 | withscores=withscores, 301 | score_cast_func=score_cast_func, 302 | ) 303 | 304 | if withscores: 305 | return [(self.decode(member), score) for member, score in result] 306 | 307 | return [self.decode(member) for member in result] 308 | 309 | def zscore( 310 | self, 311 | name: KeyT, 312 | value: Any, 313 | version: Optional[int] = None, 314 | client: Optional[Redis] = None, 315 | ) -> Optional[float]: 316 | """Get the score of member in sorted set.""" 317 | if client is None: 318 | client = self.get_client(write=False) 319 | 320 | name = self.make_key(name, version=version) 321 | value = self.encode(value) 322 | score = client.zscore(name, value) 323 | 324 | return float(score) if score is not None else None 325 | -------------------------------------------------------------------------------- /django_redis/cache.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import logging 3 | from typing import Any, Callable, Optional 4 | 5 | from django import VERSION as DJANGO_VERSION 6 | from django.conf import settings 7 | from django.core.cache.backends.base import BaseCache 8 | from django.utils.module_loading import import_string 9 | 10 | from django_redis.exceptions import ConnectionInterrupted 11 | 12 | CONNECTION_INTERRUPTED = object() 13 | 14 | 15 | def 
omit_exception( 16 | method: Optional[Callable] = None, 17 | return_value: Optional[Any] = None, 18 | ): 19 | """ 20 | Simple decorator that intercepts connection 21 | errors and ignores these if settings specify this. 22 | """ 23 | 24 | if method is None: 25 | return functools.partial(omit_exception, return_value=return_value) 26 | 27 | @functools.wraps(method) 28 | def _decorator(self, *args, **kwargs): 29 | try: 30 | return method(self, *args, **kwargs) 31 | except ConnectionInterrupted as e: 32 | if self._ignore_exceptions: 33 | if self._log_ignored_exceptions: 34 | self.logger.exception("Exception ignored") 35 | 36 | return return_value 37 | raise e.__cause__ # noqa: B904 38 | 39 | return _decorator 40 | 41 | 42 | class RedisCache(BaseCache): 43 | def __init__(self, server: str, params: dict[str, Any]) -> None: 44 | super().__init__(params) 45 | self._server = server 46 | self._params = params 47 | self._default_scan_itersize = getattr( 48 | settings, 49 | "DJANGO_REDIS_SCAN_ITERSIZE", 50 | 10, 51 | ) 52 | 53 | options = params.get("OPTIONS", {}) 54 | self._client_cls = options.get( 55 | "CLIENT_CLASS", 56 | "django_redis.client.DefaultClient", 57 | ) 58 | self._client_cls = import_string(self._client_cls) 59 | self._client = None 60 | 61 | self._ignore_exceptions = options.get( 62 | "IGNORE_EXCEPTIONS", 63 | getattr(settings, "DJANGO_REDIS_IGNORE_EXCEPTIONS", False), 64 | ) 65 | self._log_ignored_exceptions = getattr( 66 | settings, 67 | "DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS", 68 | False, 69 | ) 70 | self.logger = ( 71 | logging.getLogger(getattr(settings, "DJANGO_REDIS_LOGGER", __name__)) 72 | if self._log_ignored_exceptions 73 | else None 74 | ) 75 | 76 | @property 77 | def client(self): 78 | """ 79 | Lazy client connection property. 
80 | """ 81 | if self._client is None: 82 | self._client = self._client_cls(self._server, self._params, self) 83 | return self._client 84 | 85 | @omit_exception 86 | def set(self, *args, **kwargs): 87 | return self.client.set(*args, **kwargs) 88 | 89 | @omit_exception 90 | def incr_version(self, *args, **kwargs): 91 | return self.client.incr_version(*args, **kwargs) 92 | 93 | @omit_exception 94 | def add(self, *args, **kwargs): 95 | return self.client.add(*args, **kwargs) 96 | 97 | def get(self, key, default=None, version=None, client=None): 98 | value = self._get(key, default, version, client) 99 | if value is CONNECTION_INTERRUPTED: 100 | value = default 101 | return value 102 | 103 | @omit_exception(return_value=CONNECTION_INTERRUPTED) 104 | def _get(self, key, default, version, client): 105 | return self.client.get(key, default=default, version=version, client=client) 106 | 107 | @omit_exception 108 | def delete(self, *args, **kwargs): 109 | """returns a boolean instead of int since django version 3.1""" 110 | result = self.client.delete(*args, **kwargs) 111 | return bool(result) if DJANGO_VERSION >= (3, 1, 0) else result 112 | 113 | @omit_exception 114 | def delete_pattern(self, *args, **kwargs): 115 | kwargs.setdefault("itersize", self._default_scan_itersize) 116 | return self.client.delete_pattern(*args, **kwargs) 117 | 118 | @omit_exception 119 | def delete_many(self, *args, **kwargs): 120 | return self.client.delete_many(*args, **kwargs) 121 | 122 | @omit_exception 123 | def clear(self): 124 | return self.client.clear() 125 | 126 | @omit_exception(return_value={}) 127 | def get_many(self, *args, **kwargs): 128 | return self.client.get_many(*args, **kwargs) 129 | 130 | @omit_exception 131 | def set_many(self, *args, **kwargs): 132 | return self.client.set_many(*args, **kwargs) 133 | 134 | @omit_exception 135 | def incr(self, *args, **kwargs): 136 | return self.client.incr(*args, **kwargs) 137 | 138 | @omit_exception 139 | def decr(self, *args, **kwargs): 
140 | return self.client.decr(*args, **kwargs) 141 | 142 | @omit_exception 143 | def has_key(self, *args, **kwargs): 144 | return self.client.has_key(*args, **kwargs) 145 | 146 | @omit_exception 147 | def keys(self, *args, **kwargs): 148 | return self.client.keys(*args, **kwargs) 149 | 150 | @omit_exception 151 | def iter_keys(self, *args, **kwargs): 152 | return self.client.iter_keys(*args, **kwargs) 153 | 154 | @omit_exception 155 | def ttl(self, *args, **kwargs): 156 | return self.client.ttl(*args, **kwargs) 157 | 158 | @omit_exception 159 | def pttl(self, *args, **kwargs): 160 | return self.client.pttl(*args, **kwargs) 161 | 162 | @omit_exception 163 | def persist(self, *args, **kwargs): 164 | return self.client.persist(*args, **kwargs) 165 | 166 | @omit_exception 167 | def expire(self, *args, **kwargs): 168 | return self.client.expire(*args, **kwargs) 169 | 170 | @omit_exception 171 | def expire_at(self, *args, **kwargs): 172 | return self.client.expire_at(*args, **kwargs) 173 | 174 | @omit_exception 175 | def pexpire(self, *args, **kwargs): 176 | return self.client.pexpire(*args, **kwargs) 177 | 178 | @omit_exception 179 | def pexpire_at(self, *args, **kwargs): 180 | return self.client.pexpire_at(*args, **kwargs) 181 | 182 | @omit_exception 183 | def lock(self, *args, **kwargs): 184 | return self.client.lock(*args, **kwargs) 185 | 186 | @omit_exception 187 | def close(self, **kwargs): 188 | self.client.close(**kwargs) 189 | 190 | @omit_exception 191 | def touch(self, *args, **kwargs): 192 | return self.client.touch(*args, **kwargs) 193 | 194 | @omit_exception 195 | def sadd(self, *args, **kwargs): 196 | return self.client.sadd(*args, **kwargs) 197 | 198 | @omit_exception 199 | def scard(self, *args, **kwargs): 200 | return self.client.scard(*args, **kwargs) 201 | 202 | @omit_exception 203 | def sdiff(self, *args, **kwargs): 204 | return self.client.sdiff(*args, **kwargs) 205 | 206 | @omit_exception 207 | def sdiffstore(self, *args, **kwargs): 208 | return 
self.client.sdiffstore(*args, **kwargs) 209 | 210 | @omit_exception 211 | def sinter(self, *args, **kwargs): 212 | return self.client.sinter(*args, **kwargs) 213 | 214 | @omit_exception 215 | def sinterstore(self, *args, **kwargs): 216 | return self.client.sinterstore(*args, **kwargs) 217 | 218 | @omit_exception 219 | def sismember(self, *args, **kwargs): 220 | return self.client.sismember(*args, **kwargs) 221 | 222 | @omit_exception 223 | def smembers(self, *args, **kwargs): 224 | return self.client.smembers(*args, **kwargs) 225 | 226 | @omit_exception 227 | def smove(self, *args, **kwargs): 228 | return self.client.smove(*args, **kwargs) 229 | 230 | @omit_exception 231 | def spop(self, *args, **kwargs): 232 | return self.client.spop(*args, **kwargs) 233 | 234 | @omit_exception 235 | def srandmember(self, *args, **kwargs): 236 | return self.client.srandmember(*args, **kwargs) 237 | 238 | @omit_exception 239 | def srem(self, *args, **kwargs): 240 | return self.client.srem(*args, **kwargs) 241 | 242 | @omit_exception 243 | def sscan(self, *args, **kwargs): 244 | return self.client.sscan(*args, **kwargs) 245 | 246 | @omit_exception 247 | def sscan_iter(self, *args, **kwargs): 248 | return self.client.sscan_iter(*args, **kwargs) 249 | 250 | @omit_exception 251 | def smismember(self, *args, **kwargs): 252 | return self.client.smismember(*args, **kwargs) 253 | 254 | @omit_exception 255 | def sunion(self, *args, **kwargs): 256 | return self.client.sunion(*args, **kwargs) 257 | 258 | @omit_exception 259 | def sunionstore(self, *args, **kwargs): 260 | return self.client.sunionstore(*args, **kwargs) 261 | 262 | @omit_exception 263 | def hset(self, *args, **kwargs): 264 | return self.client.hset(*args, **kwargs) 265 | 266 | @omit_exception 267 | def hdel(self, *args, **kwargs): 268 | return self.client.hdel(*args, **kwargs) 269 | 270 | @omit_exception 271 | def hlen(self, *args, **kwargs): 272 | return self.client.hlen(*args, **kwargs) 273 | 274 | @omit_exception 275 | def 
hkeys(self, *args, **kwargs): 276 | return self.client.hkeys(*args, **kwargs) 277 | 278 | @omit_exception 279 | def hexists(self, *args, **kwargs): 280 | return self.client.hexists(*args, **kwargs) 281 | 282 | # Sorted Set Operations 283 | @omit_exception 284 | def zadd(self, *args, **kwargs): 285 | return self.client.zadd(*args, **kwargs) 286 | 287 | @omit_exception 288 | def zcard(self, *args, **kwargs): 289 | return self.client.zcard(*args, **kwargs) 290 | 291 | @omit_exception 292 | def zcount(self, *args, **kwargs): 293 | return self.client.zcount(*args, **kwargs) 294 | 295 | @omit_exception 296 | def zincrby(self, *args, **kwargs): 297 | return self.client.zincrby(*args, **kwargs) 298 | 299 | @omit_exception 300 | def zpopmax(self, *args, **kwargs): 301 | return self.client.zpopmax(*args, **kwargs) 302 | 303 | @omit_exception 304 | def zpopmin(self, *args, **kwargs): 305 | return self.client.zpopmin(*args, **kwargs) 306 | 307 | @omit_exception 308 | def zrange(self, *args, **kwargs): 309 | return self.client.zrange(*args, **kwargs) 310 | 311 | @omit_exception 312 | def zrangebyscore(self, *args, **kwargs): 313 | return self.client.zrangebyscore(*args, **kwargs) 314 | 315 | @omit_exception 316 | def zrank(self, *args, **kwargs): 317 | return self.client.zrank(*args, **kwargs) 318 | 319 | @omit_exception 320 | def zrem(self, *args, **kwargs): 321 | return self.client.zrem(*args, **kwargs) 322 | 323 | @omit_exception 324 | def zremrangebyscore(self, *args, **kwargs): 325 | return self.client.zremrangebyscore(*args, **kwargs) 326 | 327 | @omit_exception 328 | def zrevrange(self, *args, **kwargs): 329 | return self.client.zrevrange(*args, **kwargs) 330 | 331 | @omit_exception 332 | def zrevrangebyscore(self, *args, **kwargs): 333 | return self.client.zrevrangebyscore(*args, **kwargs) 334 | 335 | @omit_exception 336 | def zscore(self, *args, **kwargs): 337 | return self.client.zscore(*args, **kwargs) 338 | 
-------------------------------------------------------------------------------- /tests/test_session.py: -------------------------------------------------------------------------------- 1 | import base64 2 | from collections import Counter 3 | from collections.abc import Iterable 4 | from datetime import timedelta 5 | 6 | import django 7 | import pytest 8 | from django.contrib.sessions.backends.cache import SessionStore 9 | from django.test import override_settings 10 | from django.utils import timezone 11 | 12 | 13 | @pytest.fixture 14 | def session(cache) -> Iterable[SessionStore]: 15 | s = SessionStore() 16 | 17 | yield s 18 | 19 | s.delete() 20 | 21 | 22 | def test_new_session(session): 23 | assert session.modified is False 24 | assert session.accessed is False 25 | 26 | 27 | def test_get_empty(session): 28 | assert session.get("cat") is None 29 | 30 | 31 | def test_store(session): 32 | session["cat"] = "dog" 33 | assert session.modified is True 34 | assert session.pop("cat") == "dog" 35 | 36 | 37 | def test_pop(session): 38 | session["some key"] = "exists" 39 | # Need to reset these to pretend we haven't accessed it: 40 | session.accessed = False 41 | session.modified = False 42 | 43 | assert session.pop("some key") == "exists" 44 | assert session.accessed is True 45 | assert session.modified is True 46 | assert session.get("some key") is None 47 | 48 | 49 | def test_pop_default(session): 50 | assert session.pop("some key", "does not exist") == "does not exist" 51 | assert session.accessed is True 52 | assert session.modified is False 53 | 54 | 55 | def test_pop_default_named_argument(session): 56 | assert session.pop("some key", default="does not exist") == "does not exist" 57 | assert session.accessed is True 58 | assert session.modified is False 59 | 60 | 61 | def test_pop_no_default_keyerror_raised(session): 62 | with pytest.raises(KeyError): 63 | session.pop("some key") 64 | 65 | 66 | def test_setdefault(session): 67 | assert session.setdefault("foo", 
"bar") == "bar" 68 | assert session.setdefault("foo", "baz") == "bar" 69 | assert session.accessed is True 70 | assert session.modified is True 71 | 72 | 73 | def test_update(session): 74 | session.update({"update key": 1}) 75 | assert session.accessed is True 76 | assert session.modified is True 77 | assert session.get("update key") == 1 78 | 79 | 80 | def test_has_key(session): 81 | session["some key"] = 1 82 | session.modified = False 83 | session.accessed = False 84 | assert "some key" in session 85 | assert session.accessed is True 86 | assert session.modified is False 87 | 88 | 89 | def test_values(session): 90 | assert list(session.values()) == [] 91 | assert session.accessed is True 92 | session["some key"] = 1 93 | session.modified = False 94 | session.accessed = False 95 | assert list(session.values()) == [1] 96 | assert session.accessed is True 97 | assert session.modified is False 98 | 99 | 100 | def test_keys(session): 101 | session["x"] = 1 102 | session.modified = False 103 | session.accessed = False 104 | assert list(session.keys()) == ["x"] 105 | assert session.accessed is True 106 | assert session.modified is False 107 | 108 | 109 | def test_items(session): 110 | session["x"] = 1 111 | session.modified = False 112 | session.accessed = False 113 | assert list(session.items()) == [("x", 1)] 114 | assert session.accessed is True 115 | assert session.modified is False 116 | 117 | 118 | def test_clear(session): 119 | session["x"] = 1 120 | session.modified = False 121 | session.accessed = False 122 | assert list(session.items()) == [("x", 1)] 123 | session.clear() 124 | assert list(session.items()) == [] 125 | assert session.accessed is True 126 | assert session.modified is True 127 | 128 | 129 | def test_save(session): 130 | session.save() 131 | assert session.exists(session.session_key) is True 132 | 133 | 134 | def test_delete(session): 135 | session.save() 136 | session.delete(session.session_key) 137 | assert session.exists(session.session_key) is 
False 138 | 139 | 140 | def test_flush(session): 141 | session["foo"] = "bar" 142 | session.save() 143 | prev_key = session.session_key 144 | session.flush() 145 | assert session.exists(prev_key) is False 146 | assert session.session_key != prev_key 147 | assert session.session_key is None 148 | assert session.modified is True 149 | assert session.accessed is True 150 | 151 | 152 | def test_cycle(session): 153 | session["a"], session["b"] = "c", "d" 154 | session.save() 155 | prev_key = session.session_key 156 | prev_data = list(session.items()) 157 | session.cycle_key() 158 | assert session.exists(prev_key) is False 159 | assert session.session_key != prev_key 160 | assert list(session.items()) == prev_data 161 | 162 | 163 | def test_cycle_with_no_session_cache(session): 164 | session["a"], session["b"] = "c", "d" 165 | session.save() 166 | prev_data = session.items() 167 | session = SessionStore(session.session_key) 168 | assert hasattr(session, "_session_cache") is False 169 | session.cycle_key() 170 | assert Counter(session.items()) == Counter(prev_data) 171 | 172 | 173 | def test_save_doesnt_clear_data(session): 174 | session["a"] = "b" 175 | session.save() 176 | assert session["a"] == "b" 177 | 178 | 179 | def test_invalid_key(session): 180 | # Submitting an invalid session key (either by guessing, or if the db has 181 | # removed the key) results in a new key being generated. 
182 | try: 183 | session = SessionStore("1") 184 | session.save() 185 | assert session.session_key != "1" 186 | assert session.get("cat") is None 187 | session.delete() 188 | finally: 189 | # Some backends leave a stale cache entry for the invalid 190 | # session key; make sure that entry is manually deleted 191 | session.delete("1") 192 | 193 | 194 | def test_session_key_empty_string_invalid(session): 195 | """Falsey values (Such as an empty string) are rejected.""" 196 | session._session_key = "" 197 | assert session.session_key is None 198 | 199 | 200 | def test_session_key_too_short_invalid(session): 201 | """Strings shorter than 8 characters are rejected.""" 202 | session._session_key = "1234567" 203 | assert session.session_key is None 204 | 205 | 206 | def test_session_key_valid_string_saved(session): 207 | """Strings of length 8 and up are accepted and stored.""" 208 | session._session_key = "12345678" 209 | assert session.session_key == "12345678" 210 | 211 | 212 | def test_session_key_is_read_only(session): 213 | def set_session_key(s): 214 | s.session_key = s._get_new_session_key() 215 | 216 | with pytest.raises(AttributeError): 217 | set_session_key(session) 218 | 219 | 220 | # Custom session expiry 221 | def test_default_expiry(session, settings): 222 | # A normal session has a max age equal to settings 223 | assert session.get_expiry_age() == settings.SESSION_COOKIE_AGE 224 | 225 | # So does a custom session with an idle expiration time of 0 (but it'll 226 | # expire at browser close) 227 | session.set_expiry(0) 228 | assert session.get_expiry_age() == settings.SESSION_COOKIE_AGE 229 | 230 | 231 | def test_custom_expiry_seconds(session): 232 | modification = timezone.now() 233 | 234 | session.set_expiry(10) 235 | 236 | date = session.get_expiry_date(modification=modification) 237 | assert date == modification + timedelta(seconds=10) 238 | 239 | age = session.get_expiry_age(modification=modification) 240 | assert age == 10 241 | 242 | 243 | def 
test_custom_expiry_timedelta(session): 244 | modification = timezone.now() 245 | 246 | # Mock timezone.now, because set_expiry calls it on this code path. 247 | original_now = timezone.now 248 | try: 249 | timezone.now = lambda: modification 250 | session.set_expiry(timedelta(seconds=10)) 251 | finally: 252 | timezone.now = original_now 253 | 254 | date = session.get_expiry_date(modification=modification) 255 | assert date == modification + timedelta(seconds=10) 256 | 257 | age = session.get_expiry_age(modification=modification) 258 | assert age == 10 259 | 260 | 261 | def test_custom_expiry_datetime(session): 262 | modification = timezone.now() 263 | 264 | session.set_expiry(modification + timedelta(seconds=10)) 265 | 266 | date = session.get_expiry_date(modification=modification) 267 | assert date == modification + timedelta(seconds=10) 268 | 269 | age = session.get_expiry_age(modification=modification) 270 | assert age == 10 271 | 272 | 273 | def test_custom_expiry_reset(session, settings): 274 | session.set_expiry(None) 275 | session.set_expiry(10) 276 | session.set_expiry(None) 277 | assert session.get_expiry_age() == settings.SESSION_COOKIE_AGE 278 | 279 | 280 | def test_get_expire_at_browser_close(session): 281 | # Tests get_expire_at_browser_close with different settings and different 282 | # set_expiry calls 283 | with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False): 284 | session.set_expiry(10) 285 | assert session.get_expire_at_browser_close() is False 286 | 287 | session.set_expiry(0) 288 | assert session.get_expire_at_browser_close() is True 289 | 290 | session.set_expiry(None) 291 | assert session.get_expire_at_browser_close() is False 292 | 293 | with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True): 294 | session.set_expiry(10) 295 | assert session.get_expire_at_browser_close() is False 296 | 297 | session.set_expiry(0) 298 | assert session.get_expire_at_browser_close() is True 299 | 300 | session.set_expiry(None) 301 | assert 
session.get_expire_at_browser_close() is True 302 | 303 | 304 | def test_decode(session): 305 | # Ensure we can decode what we encode 306 | data = {"a test key": "a test value"} 307 | encoded = session.encode(data) 308 | assert session.decode(encoded) == data 309 | 310 | 311 | def test_decode_failure_logged_to_security(session, caplog): 312 | bad_encode = base64.b64encode(b"flaskdj:alkdjf").decode("ascii") 313 | # with self.assertLogs("django.security.SuspiciousSession", "WARNING") as cm: 314 | assert session.decode(bad_encode) == {} 315 | assert ( 316 | "django.security.SuspiciousSession", 317 | 30, 318 | "Session data corrupted", 319 | ) in caplog.record_tuples 320 | 321 | 322 | @pytest.mark.skipif( 323 | django.VERSION >= (4, 2), 324 | reason="PickleSerializer is removed as of https://code.djangoproject.com/ticket/29708", 325 | ) 326 | def test_actual_expiry(session): 327 | # this doesn't work with JSONSerializer (serializing timedelta) 328 | with override_settings( 329 | SESSION_SERIALIZER="django.contrib.sessions.serializers.PickleSerializer", 330 | ): 331 | session = SessionStore() # reinitialize after overriding settings 332 | 333 | # Regression test for #19200 334 | old_session_key = None 335 | new_session_key = None 336 | try: 337 | session["foo"] = "bar" 338 | session.set_expiry(-timedelta(seconds=10)) 339 | session.save() 340 | old_session_key = session.session_key 341 | # With an expiry date in the past, the session expires instantly. 342 | new_session = SessionStore(session.session_key) 343 | new_session_key = new_session.session_key 344 | assert "foo" not in new_session 345 | finally: 346 | session.delete(old_session_key) 347 | session.delete(new_session_key) 348 | 349 | 350 | def test_session_load_does_not_create_record(session): 351 | """ 352 | Loading an unknown session key does not create a session record. 353 | Creating session records on load is a DOS vulnerability. 
354 | """ 355 | session = SessionStore("someunknownkey") 356 | session.load() 357 | 358 | assert session.session_key is None 359 | assert session.exists(session.session_key) is False 360 | # provided unknown key was cycled, not reused 361 | assert session.session_key != "someunknownkey" 362 | 363 | 364 | def test_session_save_does_not_resurrect_session_logged_out_in_other_context(session): 365 | """ 366 | Sessions shouldn't be resurrected by a concurrent request. 367 | """ 368 | from django.contrib.sessions.backends.base import UpdateError 369 | 370 | # Create new session. 371 | s1 = SessionStore() 372 | s1["test_data"] = "value1" 373 | s1.save(must_create=True) 374 | 375 | # Logout in another context. 376 | s2 = SessionStore(s1.session_key) 377 | s2.delete() 378 | 379 | # Modify session in first context. 380 | s1["test_data"] = "value2" 381 | with pytest.raises(UpdateError): 382 | # This should throw an exception as the session is deleted, not 383 | # resurrect the session. 384 | s1.save() 385 | 386 | assert s1.load() == {} 387 | -------------------------------------------------------------------------------- /django_redis/client/sharded.py: -------------------------------------------------------------------------------- 1 | import builtins 2 | import re 3 | from collections import OrderedDict 4 | from collections.abc import Iterator 5 | from datetime import datetime 6 | from typing import Any, Optional, Union 7 | 8 | from redis import Redis 9 | from redis.exceptions import ConnectionError as RedisConnectionError 10 | from redis.typing import KeyT 11 | 12 | from django_redis.client.default import DEFAULT_TIMEOUT, DefaultClient 13 | from django_redis.exceptions import ConnectionInterrupted 14 | from django_redis.hash_ring import HashRing 15 | from django_redis.util import CacheKey 16 | 17 | 18 | class ShardClient(DefaultClient): 19 | _findhash = re.compile(r".*\{(.*)\}.*", re.I) 20 | 21 | def __init__(self, *args, **kwargs): 22 | super().__init__(*args, **kwargs) 
        # A shard client is always configured against a list of servers;
        # normalise a single connection string into a one-element list.
        if not isinstance(self._server, (list, tuple)):
            self._server = [self._server]

        # Consistent-hash ring decides which server owns each key.
        self._ring = HashRing(self._server)
        self._serverdict = self.connect()

    def get_client(self, *args, **kwargs):
        # Key-based routing (get_server) replaces the single-client accessor.
        raise NotImplementedError

    def connect(self, index=0):
        """Open one connection per configured server and return them by name.

        ``index`` is unused here; presumably kept for signature
        compatibility with the base client — TODO confirm.
        """
        connection_dict = {}
        for name in self._server:
            connection_dict[name] = self.connection_factory.connect(name)
        return connection_dict

    def get_server_name(self, _key):
        """Return the name of the server that owns ``_key``.

        Supports redis-style hash tags: if the key contains ``{tag}``, only
        the tagged substring is hashed, so related keys can be pinned to
        the same shard.
        """
        key = str(_key)
        g = self._findhash.match(key)
        if g is not None and len(g.groups()) > 0:
            key = g.groups()[0]
        return self._ring.get_node(key)

    def get_server(self, key):
        # Resolve the live connection that owns ``key``.
        name = self.get_server_name(key)
        return self._serverdict[name]

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None, client=None):
        # Route to the owning shard, then delegate to the base client.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)

        return super().add(
            key=key,
            value=value,
            version=version,
            client=client,
            timeout=timeout,
        )

    def get(self, key, default=None, version=None, client=None):
        # Route to the owning shard, then delegate to the base client.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)

        return super().get(key=key, default=default, version=version, client=client)

    def get_many(self, keys, version=None):
        # NOTE: one GET round trip per key — keys may live on different
        # shards, so a single MGET cannot be used here.  Insertion order of
        # the input keys is preserved in the result.
        if not keys:
            return {}

        recovered_data = OrderedDict()

        new_keys = [self.make_key(key, version=version) for key in keys]
        map_keys = dict(zip(new_keys, keys))

        for key in new_keys:
            client = self.get_server(key)
            value = self.get(key=key, version=version, client=client)

            if value is None:
                continue

            recovered_data[map_keys[key]] = value
        return recovered_data

    def set(
        self,
        key,
        value,
        timeout=DEFAULT_TIMEOUT,
        version=None,
        client=None,
        nx=False,
        xx=False,
): 99 | """ 100 | Persist a value to the cache, and set an optional expiration time. 101 | """ 102 | if client is None: 103 | key = self.make_key(key, version=version) 104 | client = self.get_server(key) 105 | 106 | return super().set( 107 | key=key, 108 | value=value, 109 | timeout=timeout, 110 | version=version, 111 | client=client, 112 | nx=nx, 113 | xx=xx, 114 | ) 115 | 116 | def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None, client=None): 117 | """ 118 | Set a bunch of values in the cache at once from a dict of key/value 119 | pairs. This is much more efficient than calling set() multiple times. 120 | 121 | If timeout is given, that timeout will be used for the key; otherwise 122 | the default cache timeout will be used. 123 | """ 124 | for key, value in data.items(): 125 | self.set(key, value, timeout, version=version, client=client) 126 | 127 | def has_key(self, key, version=None, client=None): 128 | """ 129 | Test if key exists. 130 | """ 131 | 132 | if client is None: 133 | key = self.make_key(key, version=version) 134 | client = self.get_server(key) 135 | 136 | key = self.make_key(key, version=version) 137 | try: 138 | return client.exists(key) == 1 139 | except RedisConnectionError as e: 140 | raise ConnectionInterrupted(connection=client) from e 141 | 142 | def delete(self, key, version=None, client=None): 143 | if client is None: 144 | key = self.make_key(key, version=version) 145 | client = self.get_server(key) 146 | 147 | return super().delete(key=key, version=version, client=client) 148 | 149 | def ttl(self, key, version=None, client=None): 150 | """ 151 | Executes TTL redis command and return the "time-to-live" of specified key. 152 | If key is a non volatile key, it returns None. 
        """

        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)

        return super().ttl(key=key, version=version, client=client)

    def pttl(self, key, version=None, client=None):
        """
        Executes PTTL redis command and return the "time-to-live" of specified key
        in milliseconds. If key is a non volatile key, it returns None.
        """

        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)

        return super().pttl(key=key, version=version, client=client)

    def persist(self, key, version=None, client=None):
        # Remove any TTL from ``key`` on the shard that owns it.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)

        return super().persist(key=key, version=version, client=client)

    def expire(self, key, timeout, version=None, client=None):
        # Set ``timeout`` (seconds) on ``key`` on the owning shard.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)

        return super().expire(key=key, timeout=timeout, version=version, client=client)

    def pexpire(self, key, timeout, version=None, client=None):
        # Millisecond-precision variant of expire().
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)

        return super().pexpire(key=key, timeout=timeout, version=version, client=client)

    def pexpire_at(self, key, when: Union[datetime, int], version=None, client=None):
        """
        Set an expire flag on a ``key`` to ``when`` on a shard client.
        ``when`` which can be represented as an integer indicating unix
        time or a Python datetime object.
199 | """ 200 | if client is None: 201 | key = self.make_key(key, version=version) 202 | client = self.get_server(key) 203 | 204 | return super().pexpire_at(key=key, when=when, version=version, client=client) 205 | 206 | def expire_at(self, key, when: Union[datetime, int], version=None, client=None): 207 | """ 208 | Set an expire flag on a ``key`` to ``when`` on a shard client. 209 | ``when`` which can be represented as an integer indicating unix 210 | time or a Python datetime object. 211 | """ 212 | if client is None: 213 | key = self.make_key(key, version=version) 214 | client = self.get_server(key) 215 | 216 | return super().expire_at(key=key, when=when, version=version, client=client) 217 | 218 | def lock( 219 | self, 220 | key, 221 | version=None, 222 | timeout=None, 223 | sleep=0.1, 224 | blocking_timeout=None, 225 | client=None, 226 | thread_local=True, 227 | ): 228 | if client is None: 229 | key = self.make_key(key, version=version) 230 | client = self.get_server(key) 231 | 232 | key = self.make_key(key, version=version) 233 | return super().lock( 234 | key, 235 | timeout=timeout, 236 | sleep=sleep, 237 | client=client, 238 | blocking_timeout=blocking_timeout, 239 | thread_local=thread_local, 240 | ) 241 | 242 | def delete_many(self, keys, version=None): 243 | """ 244 | Remove multiple keys at once. 
245 | """ 246 | res = 0 247 | for key in [self.make_key(k, version=version) for k in keys]: 248 | client = self.get_server(key) 249 | res += self.delete(key, client=client) 250 | return res 251 | 252 | def incr_version(self, key, delta=1, version=None, client=None): 253 | if client is None: 254 | key = self.make_key(key, version=version) 255 | client = self.get_server(key) 256 | 257 | if version is None: 258 | version = self._backend.version 259 | 260 | old_key = self.make_key(key, version) 261 | value = self.get(old_key, version=version, client=client) 262 | 263 | try: 264 | ttl = self.ttl(old_key, version=version, client=client) 265 | except RedisConnectionError as e: 266 | raise ConnectionInterrupted(connection=client) from e 267 | 268 | if value is None: 269 | msg = f"Key '{key}' not found" 270 | raise ValueError(msg) 271 | 272 | if isinstance(key, CacheKey): 273 | new_key = self.make_key(key.original_key(), version=version + delta) 274 | else: 275 | new_key = self.make_key(key, version=version + delta) 276 | 277 | self.set(new_key, value, timeout=ttl, client=self.get_server(new_key)) 278 | self.delete(old_key, client=client) 279 | return version + delta 280 | 281 | def incr(self, key, delta=1, version=None, client=None): 282 | if client is None: 283 | key = self.make_key(key, version=version) 284 | client = self.get_server(key) 285 | 286 | return super().incr(key=key, delta=delta, version=version, client=client) 287 | 288 | def decr(self, key, delta=1, version=None, client=None): 289 | if client is None: 290 | key = self.make_key(key, version=version) 291 | client = self.get_server(key) 292 | 293 | return super().decr(key=key, delta=delta, version=version, client=client) 294 | 295 | def iter_keys(self, key, version=None): 296 | error_message = "iter_keys not supported on sharded client" 297 | raise NotImplementedError(error_message) 298 | 299 | def keys(self, search, version=None): 300 | pattern = self.make_pattern(search, version=version) 301 | keys = [] 302 
| try: 303 | for connection in self._serverdict.values(): 304 | keys.extend(connection.keys(pattern)) 305 | except RedisConnectionError as e: 306 | # FIXME: technically all clients should be passed as `connection`. 307 | client = self.get_server(pattern) 308 | raise ConnectionInterrupted(connection=client) from e 309 | 310 | return [self.reverse_key(k.decode()) for k in keys] 311 | 312 | def delete_pattern( 313 | self, 314 | pattern, 315 | version=None, 316 | client=None, 317 | itersize=None, 318 | prefix=None, 319 | ): 320 | """ 321 | Remove all keys matching pattern. 322 | """ 323 | pattern = self.make_pattern(pattern, version=version, prefix=prefix) 324 | kwargs = {"match": pattern} 325 | if itersize: 326 | kwargs["count"] = itersize 327 | 328 | keys = [] 329 | for connection in self._serverdict.values(): 330 | keys.extend(key for key in connection.scan_iter(**kwargs)) 331 | 332 | res = 0 333 | if keys: 334 | for connection in self._serverdict.values(): 335 | res += connection.delete(*keys) 336 | return res 337 | 338 | def do_close_clients(self): 339 | for client in self._serverdict.values(): 340 | self.disconnect(client=client) 341 | 342 | def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, client=None): 343 | if client is None: 344 | key = self.make_key(key, version=version) 345 | client = self.get_server(key) 346 | 347 | return super().touch(key=key, timeout=timeout, version=version, client=client) 348 | 349 | def clear(self, client=None): 350 | for connection in self._serverdict.values(): 351 | connection.flushdb() 352 | 353 | def sadd( 354 | self, 355 | key: KeyT, 356 | *values: Any, 357 | version: Optional[int] = None, 358 | client: Optional[Redis] = None, 359 | ) -> int: 360 | if client is None: 361 | key = self.make_key(key, version=version) 362 | client = self.get_server(key) 363 | return super().sadd(key, *values, version=version, client=client) 364 | 365 | def scard( 366 | self, 367 | key: KeyT, 368 | version: Optional[int] = None, 369 | 
        client: Optional[Redis] = None,
    ) -> int:
        # Cardinality of the set stored at ``key`` on its owning shard.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)
        return super().scard(key=key, version=version, client=client)

    def smembers(
        self,
        key: KeyT,
        version: Optional[int] = None,
        client: Optional[Redis] = None,
    ) -> builtins.set[Any]:
        # Return every member of the set stored at ``key``.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)
        return super().smembers(key=key, version=version, client=client)

    def smove(
        self,
        source: KeyT,
        destination: KeyT,
        member: Any,
        version: Optional[int] = None,
        client: Optional[Redis] = None,
    ):
        # NOTE(review): routing uses the shard that owns ``source`` only;
        # if ``destination`` hashes to a different shard the underlying
        # SMOVE would operate on the wrong server — presumably callers
        # keep both keys co-located (e.g. via {hash tags}).  Confirm.
        if client is None:
            source = self.make_key(source, version=version)
            client = self.get_server(source)
            destination = self.make_key(destination, version=version)

        return super().smove(
            source=source,
            destination=destination,
            member=member,
            version=version,
            client=client,
        )

    def srem(
        self,
        key: KeyT,
        *members,
        version: Optional[int] = None,
        client: Optional[Redis] = None,
    ) -> int:
        # Remove ``members`` from the set at ``key``.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)
        return super().srem(key, *members, version=version, client=client)

    def sscan(
        self,
        key: KeyT,
        match: Optional[str] = None,
        count: Optional[int] = 10,
        version: Optional[int] = None,
        client: Optional[Redis] = None,
    ) -> builtins.set[Any]:
        # Incrementally scan members of the set at ``key`` matching ``match``.
        if client is None:
            key = self.make_key(key, version=version)
            client = self.get_server(key)
        return super().sscan(
            key=key,
            match=match,
            count=count,
            version=version,
            client=client,
        )

    def sscan_iter(
        self,
        key: KeyT,
        match: Optional[str] = None,
count: Optional[int] = 10, 444 | version: Optional[int] = None, 445 | client: Optional[Redis] = None, 446 | ) -> Iterator[Any]: 447 | if client is None: 448 | key = self.make_key(key, version=version) 449 | client = self.get_server(key) 450 | return super().sscan_iter( 451 | key=key, 452 | match=match, 453 | count=count, 454 | version=version, 455 | client=client, 456 | ) 457 | 458 | def srandmember( 459 | self, 460 | key: KeyT, 461 | count: Optional[int] = None, 462 | version: Optional[int] = None, 463 | client: Optional[Redis] = None, 464 | ) -> Union[builtins.set, Any]: 465 | if client is None: 466 | key = self.make_key(key, version=version) 467 | client = self.get_server(key) 468 | return super().srandmember(key=key, count=count, version=version, client=client) 469 | 470 | def sismember( 471 | self, 472 | key: KeyT, 473 | member: Any, 474 | version: Optional[int] = None, 475 | client: Optional[Redis] = None, 476 | ) -> bool: 477 | if client is None: 478 | key = self.make_key(key, version=version) 479 | client = self.get_server(key) 480 | return super().sismember(key, member, version=version, client=client) 481 | 482 | def spop( 483 | self, 484 | key: KeyT, 485 | count: Optional[int] = None, 486 | version: Optional[int] = None, 487 | client: Optional[Redis] = None, 488 | ) -> Union[builtins.set, Any]: 489 | if client is None: 490 | key = self.make_key(key, version=version) 491 | client = self.get_server(key) 492 | return super().spop(key=key, count=count, version=version, client=client) 493 | 494 | def smismember( 495 | self, 496 | key: KeyT, 497 | *members, 498 | version: Optional[int] = None, 499 | client: Optional[Redis] = None, 500 | ) -> list[bool]: 501 | if client is None: 502 | key = self.make_key(key, version=version) 503 | client = self.get_server(key) 504 | return super().smismember(key, *members, version=version, client=client) 505 | -------------------------------------------------------------------------------- /CHANGELOG.rst: 
-------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | .. towncrier release notes start 5 | 6 | django-redis 6.0.0 (2025-06-17) 7 | =============================== 8 | 9 | Features 10 | -------- 11 | 12 | - Support HashMaps (`#598 `_) 13 | - Support gzip compression (`#688 `_) 14 | - Support for sets and support basic operations, sadd, scard, sdiff, sdiffstore, sinter, sinterstore, smismember, sismember, smembers, smove, spop, srandmember, srem, sscan, sscan_iter, sunion, sunionstore (`#730 `_) 15 | 16 | 17 | Bug Fixes 18 | --------- 19 | 20 | - Hotfix for timeout=DEFAULT_TIMEOUT in expire and pexpire (`#724 `_) 21 | - Fix is_master parsing error for write separation in sentinel mode (`#749 `_) 22 | - Added blocking parameter for `cache.lock` (`#752 `_) 23 | 24 | 25 | Miscellaneous 26 | ------------- 27 | 28 | - Added support for Python 3.12 (`#689 `_) 29 | - Pin pytest to <7.0 until compatibility issues are resolved (`#690 `_) 30 | - Replace isort and flake8 with ruff (`#692 `_) 31 | - Drop django 4.0 (`#693 `_) 32 | - Upgrade black to 23.10.1 (`#695 `_) 33 | - Typed DefaultClient (`#696 `_) 34 | - Support pytest>=7 (`#697 `_) 35 | - Drop support for django 3.2, python 3.6 and python 3.7 (`#699 `_) 36 | - Support tox 4 (`#701 `_) 37 | - Configured dependabot for github actions (`#702 `_) 38 | - Use ubuntu-latest for CI (`#703 `_) 39 | - Dropped support for django 4.1 and added support for django 5.0 (`#729 `_) 40 | - Added support for django 5.1 (`#754 `_) 41 | - Update minimum supported versions in README.md: Python to 3.8, Django to 4.2, redis-py to 4.0.2 (`#755 `_) 42 | - Added support for Python 3.13 (`#756 `_) 43 | - Speed up tests by using `pytest-xdist` and separating settings on different redis databases. 44 | Dropped `pytest-django` 45 | Using `docker-compose` for setting up redis containers for testing 46 | Use `tox-uv` (`#757 `_) 47 | - Confirm support for Django 5.2. 
48 | Fix shadowing builtin Python exceptions. (`#824 `_) 49 | 50 | 51 | Deprecations and Removals 52 | ------------------------- 53 | 54 | - Drop support for Python 3.8 (`#852 `_) 55 | 56 | 57 | django-redis 5.4.0 (2023-10-01) 58 | =============================== 59 | 60 | Features 61 | -------- 62 | 63 | - Connection factory goes to cache options (`#680 `_) 64 | 65 | 66 | Documentation 67 | ------------- 68 | 69 | - Added note in docs for correctly configuring hiredis parser when using redis-py version 5. (`#677 `_) 70 | 71 | 72 | django-redis 5.3.0 (2023-06-16) 73 | =============================== 74 | 75 | Features 76 | -------- 77 | 78 | - Add support for django 4 (`#627 `_) 79 | 80 | 81 | Bug Fixes 82 | --------- 83 | 84 | - Access `django_redis.cache.DJANGO_REDIS_SCAN_ITERSIZE` and `django_redis.client.herd.CACHE_HERD_TIMEOUT` in runtime to not read Django settings in import time. (`#638 `_) 85 | - Skipping pickle serializer test for django >= 4.2 (`#646 `_) 86 | 87 | 88 | Miscellaneous 89 | ------------- 90 | 91 | - Speed up deleting multiple keys by a pattern with pipelines and larger itersize (`#609 `_) 92 | - Print full exception traceback when logging ignored exceptions (`#611 `_) 93 | - Fix mypy linting (`#626 `_) 94 | - Added support for python 3.11 (`#633 `_) 95 | - Fix CI, running tox<4 to still support Python 3.6. 
(`#645 `_) 96 | - Dropped support for django 2.2 and 3.1 (`#649 `_) 97 | - Run actions & tox against Django 4..2 (`#668 `_) 98 | 99 | 100 | django-redis 5.2.0 (2021-12-22) 101 | =============================== 102 | 103 | Bug Fixes 104 | --------- 105 | 106 | - Block use with broken redis-py 4.0.0 and 4.0.1 (`#542 `_) 107 | 108 | 109 | Miscellaneous 110 | ------------- 111 | 112 | - Unblock redis-py >=4.0.2 (`#576 `_) 113 | - Add support for django 4 (`#579 `_) 114 | 115 | 116 | Django_Redis 5.1.0 (2021-11-29) 117 | =============================== 118 | 119 | Features 120 | -------- 121 | 122 | - Add Python 3.10 to CI (`#536 `_) 123 | - Configured ``towncrier`` to generate the changelog. (`#548 `_) 124 | - Added ``django_redis.compressors.zstd.ZStdCompressor`` to provide ``pyzstd`` cache value compression. (`#551 `_) 125 | - Change pickle default version to Python default instead of highest version. (`#555 `_) 126 | - Add ``hiredis`` extra dependency to request ``redis[hiredis]``. (`#556 `_) 127 | - Add pexpireat to allow setting 'expire at' with millisecond precision. (`#564 `_) 128 | 129 | 130 | Bug Fixes 131 | --------- 132 | 133 | - Make expire, pexpire, expireat and persist return the redis client value (`#564 `_) 134 | 135 | 136 | Miscellaneous 137 | ------------- 138 | 139 | - Convert most unittest class tests to pytest tests. (`#553 `_) 140 | - Update type comments to type annotations. (`#568 `_) 141 | - Pin redis-py to 3.x until 4.x breaking changes can be addressed. (`#570 `_) 142 | 143 | 144 | Documentation 145 | ------------- 146 | 147 | - Clarify redis primary name in sentinel documentation. (`#529 `_) 148 | - Add documentation on configuring self signed SSL certificates. 
(`#559 `_) 149 | 150 | 151 | django-redis 5.0.0 (2021-05-30) 152 | =============================== 153 | 154 | - supporting django 3.1 and django 3.2 155 | - dropped support for python 3.5 156 | - added support for python 3.9 157 | - started type hinting the codebase 158 | - ensure connections are closed 159 | - fixed ``ShardClient`` ``.clear()`` method 160 | - ``.delete()`` now returns boolean from django 3.1 onwards 161 | - disconnect connection pools on ``.close()`` 162 | - added support for redis sentinel 163 | - added ``.expire_at()`` method 164 | - fixed ``.incr()`` when ttl is ``None`` or when the number is larger than 64 bit 165 | - fixed ``.incr_version()`` when ttl is ``None`` 166 | - added ``.pttl()`` method to the clients to support milli-second precision for 167 | ttl of a key 168 | - added ``.pexpire()`` method to the clients to support milli-second precision 169 | for setting expiry of a key 170 | 171 | 172 | django-redis 4.12.1 (2020-05-27) 173 | ================================ 174 | 175 | - No code changes. 176 | - Fixed a typo in setup.cfg metadata preventing a successful release. 177 | 178 | 179 | django-redis 4.12.0 (2020-05-27) 180 | ================================ 181 | 182 | - The project has moved to `Jazzband `_. This is the 183 | first release under the new organization. The new repository URL is 184 | ``_. 185 | - Removed support for end-of-life Django < 2.2. 186 | - Removed support for unmaintained redis-py 2.X. 187 | - Changed uses of deprecated ``smart_text()`` to ``smart_str()``. 188 | - Fixed deprecation warning with the msgpack serializer. 189 | - The ``.touch()`` method now uses the default timeout, to cache forever pass 190 | ``None``. 191 | - Subclasses of ``JSONSerializer`` can now override the ``encoder_class`` 192 | attribute to change the JSON encoder. It defaults to ``DjangoJSONEncoder``. 193 | - Fixed ``DefaultClient.set()`` to work with empty ``Pipeline``. 
194 | - The ``thread_local`` parameter is now forwarded to the Redis client. 195 | 196 | 197 | django-redis 4.11.0 (2019-12-13) 198 | ================================ 199 | 200 | - Removed support for Python 2.7 and 3.4. 201 | - Removed support for Django 2.0 and 2.1. 202 | - Added support for Python 3.8. 203 | - Added support for Django 2.2 and 3.0. 204 | - Changed msgpack-python soft dependency to msgpack. 205 | - Fixed ``.touch()`` method for sharded client. 206 | - Fixed prefix escaping for the sharded client. 207 | - Fixed ``.add()`` method to return a bool. 208 | 209 | 210 | django-redis 4.10.0 (2018-10-19) 211 | ================================ 212 | 213 | - Add support and testing for Django 2.1 and Python 3.7. No actual code changes 214 | were required. 215 | - Add support for redis-py 3.0. 216 | - Add touch command. 217 | 218 | 219 | django-redis 4.9.1 (2018-10-19) 220 | =============================== 221 | 222 | - Pin redis version to 2.10.6 223 | 224 | 225 | django-redis 4.9.0 (2018-03-01) 226 | =============================== 227 | 228 | - Add testing and support for Django 2.0. No actual code changes were required. 229 | - Escape ``KEY_PREFIX`` and ``VERSION`` when used in glob expressions. 230 | - Improve handling timeouts less than 1ms. 231 | - Remove fakeredis support. 232 | - Add datetime, date, time, and timedelta serialization support to the JSON 233 | serializer. 234 | - The deprecated feature of passing ``True`` as a timeout value is no longer 235 | supported. 236 | - Fix ``add()`` with a negative timeout to not store key (it is immediately 237 | invalid). 238 | - Remove support for Django < 1.11. 239 | - Add support for atomic incr if key is not set. 240 | 241 | 242 | django-redis 4.8.0 (2017-04-25) 243 | =============================== 244 | 245 | - Drop deprecated exception with typo ConnectionInterrumped. Use 246 | ConnectionInterrupted instead. 
247 | - Remove many workarounds related to old and not supported versions 248 | of django and redis-py. 249 | - Code cleaning and flake8 compliance fixes. 250 | - Add better impl for ``close`` method. 251 | - Fix compatibility warnings with python 3.6 252 | 253 | 254 | django-redis 4.7.0 (2017-01-02) 255 | =============================== 256 | 257 | - Add the ability to enable write to replica servers when the primary server is 258 | not available. 259 | - Add ``itersize`` parameter to ``delete_pattern``. 260 | 261 | 262 | django-redis 4.6.0 (2016-11-02) 263 | =============================== 264 | 265 | - Fix incorrect behavior of ``clear()`` method. 266 | 267 | 268 | django-redis 4.5.0 (2016-09-21) 269 | =============================== 270 | 271 | - Now only support Django 1.8 and above. Support for older versions has been dropped. 272 | - Remove undocumented and deprecated support for old connection string format. 273 | - Add support for ``PASSWORD`` option (useful when the password contains url unsafe 274 | characters). 275 | - Make the package compatible with fake redis. 276 | - Fix compatibility issues with latest django version (1.10). 277 | 278 | 279 | django-redis 4.4.4 (2016-07-25) 280 | =============================== 281 | 282 | - Fix possible race condition on incr implementation using 283 | lua script (thanks to @prokaktus). 284 | 285 | 286 | django-redis 4.4.3 (2016-05-17) 287 | =============================== 288 | 289 | - Fix minor ttl inconsistencies. 290 | 291 | 292 | django-redis 4.4.2 (2016-04-21) 293 | =============================== 294 | 295 | - Fix timeout bug (thanks to @skorokithakis) 296 | 297 | 298 | django-redis 4.4.1 (2016-04-13) 299 | =============================== 300 | 301 | - Add additional check for avoid wrong exception on ``get_redis_connection``. 
- BREAKING CHANGE: improved compression support (make it more pluggable).
- Add pluggable serializers architecture (thanks to @jdufresne)
405 | - Fix missing imports on sentinel client (by @opapy). 406 | - Connection closing improvements on sentinel client (by @opapy). 407 | 408 | 409 | django-redis 3.7.0 410 | ================== 411 | 412 | - Add support for django's ``KEY_FUNCTION`` and ``REVERSE_KEY_FUNCTION`` (by @teferi) 413 | - Accept float value for socket timeout. 414 | - Fix wrong behavior of ``DJANGO_REDIS_IGNORE_EXCEPTIONS`` with socket timeouts. 415 | - Backward incompatible change: now raises original exceptions instead of self defined. 416 | 417 | 418 | django-redis 3.6.2 419 | ================== 420 | 421 | - Add ttl method purposed to be included in django core. 422 | - Add iter_keys method that uses redis scan methods for memory efficient keys retrieval. 423 | - Add version keyword parameter to keys. 424 | - Deprecate django 1.3.x support. 425 | 426 | 427 | django-redis 3.6.1 428 | ================== 429 | 430 | - Fix wrong import on sentinel client. 431 | 432 | 433 | django-redis 3.6.0 434 | ================== 435 | 436 | - Add pluggable connection factory. 437 | - Negative timeouts now works as expected. 438 | - Delete operation now returns a number of deleted items instead of None. 439 | 440 | 441 | django-redis 3.5.1 442 | ================== 443 | 444 | - Fixed redis-py < 2.9.0 incompatibilities 445 | - Fixed runtests error with django 1.7 446 | 447 | 448 | django-redis 3.5.0 449 | ================== 450 | 451 | - Removed: stats module (should be replaced with an other in future) 452 | - New: experimental client for add support to redis-sentinel. 453 | - Now uses a django ``DEFAULT_TIMEOUT`` constant instead of ``True``. 454 | Deprecation warning added for code that now uses ``True`` (unlikely). 455 | - Fix wrong forward of timeout on shard client. 456 | - Fix incr_version wrong behavior when using shard client (wrong client used for set new key). 
457 | 458 | 459 | django-redis 3.4.0 460 | ================== 461 | 462 | - Fix exception name from ConnectionInterrumped to 463 | ConnectionInterrupted maintaining an old exception class 464 | for backward compatibility (thanks Łukasz Langa (@ambv)) 465 | 466 | - Fix wrong behavior for "default" parameter on get method 467 | when DJANGO_REDIS_IGNORE_EXCEPTIONS is True 468 | (also thanks to Łukasz Langa (@ambv)). 469 | 470 | - Now added support for replication setups to default client (it still 471 | experimental because is not tested in production environments). 472 | 473 | - Merged SimpleFailoverClient experimental client (only for 474 | experiment with it, not ready for use in production) 475 | 476 | - Django 1.6 cache changes compatibility. Explicitly passing in 477 | timeout=None no longer results in using the default timeout. 478 | 479 | - Major code cleaning. (Thanks to Bertrand Bordage @BertrandBordage) 480 | 481 | - Bugfixes related to some index error on hashring module. 482 | 483 | 484 | django-redis 3.3.0 485 | ================== 486 | 487 | - Add SOCKET_TIMEOUT attribute to OPTIONS (thanks to @eclipticplane) 488 | 489 | 490 | django-redis 3.2.0 491 | ================== 492 | 493 | - Changed default behavior of connection error exceptions: now by default 494 | raises exception on connection error is occurred. 495 | 496 | Thanks to Mümin Öztürk: 497 | 498 | - cache.add now uses setnx redis command (atomic operation) 499 | - cache.incr and cache.decr now uses redis incrby command (atomic operation) 500 | 501 | 502 | django-redis 3.1.7 503 | ================== 504 | 505 | - Fix python3 compatibility on utils module. 506 | 507 | django-redis 3.1.6 508 | ================== 509 | 510 | - Add nx argument on set method for both clients (thanks to Kirill Zaitsev) 511 | 512 | 513 | django-redis 3.1.5 514 | ================== 515 | 516 | - Bug fixes on sharded client. 
517 | 518 | 519 | django-redis 3.1.4 520 | ================== 521 | 522 | - Now reuse connection pool on massive use of ``get_cache`` method. 523 | 524 | 525 | django-redis 3.1.3 526 | ================== 527 | 528 | - Fixed python 2.6 compatibility. 529 | 530 | 531 | django-redis 3.1.2 532 | ================== 533 | 534 | - Now on call close() not disconnect all connection pool. 535 | 536 | 537 | django-redis 3.1.1 538 | ================== 539 | 540 | - Fixed incorrect exception message on LOCATION has wrong format. 541 | (Thanks to Yoav Weiss) 542 | 543 | 544 | django-redis 3.1 545 | ================ 546 | 547 | - Helpers for access to raw redis connection. 548 | 549 | 550 | django-redis 3.0 551 | ================ 552 | 553 | - Python 3.2+ support. 554 | - Code cleaning and refactor. 555 | - Ignore exceptions (same behavior as memcached backend) 556 | - Pluggable clients. 557 | - Unified connection string. 558 | 559 | 560 | django-redis 2.2.2 561 | ================== 562 | 563 | - Bug fixes on ``keys`` and ``delete_pattern`` methods. 564 | 565 | 566 | django-redis 2.2.1 567 | ================== 568 | 569 | - Remove duplicate check if key exists on ``incr`` method. 570 | - Fix incorrect behavior of ``delete_pattern`` with sharded client. 571 | 572 | 573 | django-redis 2.2 574 | ================ 575 | 576 | - New ``delete_pattern`` method. Useful for delete keys using wildcard syntax. 577 | 578 | 579 | django-redis 2.1 580 | ================ 581 | 582 | - Many bug fixes. 583 | - Client side sharding. 584 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ============================== 2 | Redis cache backend for Django 3 | ============================== 4 | 5 | .. image:: https://jazzband.co/static/img/badge.svg 6 | :target: https://jazzband.co/ 7 | :alt: Jazzband 8 | 9 | .. 
image:: https://github.com/jazzband/django-redis/actions/workflows/ci.yml/badge.svg 10 | :target: https://github.com/jazzband/django-redis/actions/workflows/ci.yml 11 | :alt: GitHub Actions 12 | 13 | .. image:: https://codecov.io/gh/jazzband/django-redis/branch/master/graph/badge.svg 14 | :target: https://codecov.io/gh/jazzband/django-redis 15 | :alt: Coverage 16 | 17 | .. image:: https://img.shields.io/pypi/v/django-redis.svg?style=flat 18 | :target: https://pypi.org/project/django-redis/ 19 | 20 | This is a `Jazzband `_ project. By contributing you agree 21 | to abide by the `Contributor Code of Conduct 22 | `_ and follow the `guidelines 23 | `_. 24 | 25 | Introduction 26 | ------------ 27 | 28 | django-redis is a BSD licensed, full featured Redis cache and session backend 29 | for Django. 30 | 31 | Why use django-redis? 32 | ~~~~~~~~~~~~~~~~~~~~~ 33 | 34 | - Uses native redis-py url notation connection strings 35 | - Pluggable clients 36 | - Pluggable parsers 37 | - Pluggable serializers 38 | - Primary/secondary support in the default client 39 | - Comprehensive test suite 40 | - Used in production in several projects as cache and session storage 41 | - Supports infinite timeouts 42 | - Facilities for raw access to Redis client/connection pool 43 | - Highly configurable (can emulate memcached exception behavior, for example) 44 | - Unix sockets supported by default 45 | 46 | Requirements 47 | ~~~~~~~~~~~~ 48 | 49 | - `Python`_ 3.9+ 50 | - `Django`_ 4.2+ 51 | - `redis-py`_ 4.0.2+ 52 | - `Redis server`_ 2.8+ 53 | 54 | .. _Python: https://www.python.org/downloads/ 55 | .. _Django: https://www.djangoproject.com/download/ 56 | .. _redis-py: https://pypi.org/project/redis/ 57 | .. _Redis server: https://redis.io/download 58 | 59 | User guide 60 | ---------- 61 | 62 | Installation 63 | ~~~~~~~~~~~~ 64 | 65 | Install with pip: 66 | 67 | .. 
An alternative would be to write both the username and password into the URL:
Take care that this option does not overwrite the password in the URI, so if
you have set the password in the URI, this setting will be ignored.
If you want to set a concrete version, you can do so using the ``PICKLE_VERSION`` option:
258 | "OPTIONS": { 259 | "COMPRESSOR": "django_redis.compressors.lzma.LzmaCompressor", 260 | } 261 | } 262 | } 263 | 264 | *Lz4* compression support (requires the lz4 library): 265 | 266 | .. code-block:: python 267 | 268 | import lz4 269 | 270 | CACHES = { 271 | "default": { 272 | # ... 273 | "OPTIONS": { 274 | "COMPRESSOR": "django_redis.compressors.lz4.Lz4Compressor", 275 | } 276 | } 277 | } 278 | 279 | *Zstandard (zstd)* compression support (requires the pyzstd library): 280 | 281 | .. code-block:: python 282 | 283 | import pyzstd 284 | 285 | CACHES = { 286 | "default": { 287 | # ... 288 | "OPTIONS": { 289 | "COMPRESSOR": "django_redis.compressors.zstd.ZStdCompressor", 290 | } 291 | } 292 | } 293 | 294 | *Gzip* compression support: 295 | 296 | .. code-block:: python 297 | 298 | import gzip 299 | 300 | CACHES = { 301 | "default": { 302 | # ... 303 | "OPTIONS": { 304 | "COMPRESSOR": "django_redis.compressors.gzip.GzipCompressor", 305 | } 306 | } 307 | } 308 | 309 | Memcached exceptions behavior 310 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 311 | 312 | In some situations, when Redis is only used for cache, you do not want 313 | exceptions when Redis is down. This is default behavior in the memcached 314 | backend and it can be emulated in django-redis. 315 | 316 | For setup memcached like behaviour (ignore connection exceptions), you should 317 | set ``IGNORE_EXCEPTIONS`` settings on your cache configuration: 318 | 319 | .. code-block:: python 320 | 321 | CACHES = { 322 | "default": { 323 | # ... 324 | "OPTIONS": { 325 | "IGNORE_EXCEPTIONS": True, 326 | } 327 | } 328 | } 329 | 330 | Also, you can apply the same settings to all configured caches, you can set the global flag in 331 | your settings: 332 | 333 | .. 
With Redis, you can access the ttl of any stored key; for this, django-redis
exposes the ``ttl`` function.
In addition to the simple ttl query, you can persist a concrete key or
specify a new expiration timeout using the ``persist`` and ``expire`` methods:
A simple search like this will return all matched values. In databases with a
large number of keys this isn't a suitable method. Instead, you can use the
``iter_keys`` function that works like the ``keys`` function but uses Redis
server side cursors. Calling ``iter_keys`` will return a generator that you can
then iterate over efficiently.
515 | 516 | You can use the ``SETNX`` command through the backend ``set()`` method with the 517 | ``nx`` parameter: 518 | 519 | .. code-block:: pycon 520 | 521 | >>> from django.core.cache import cache 522 | >>> cache.set("key", "value1", nx=True) 523 | True 524 | >>> cache.set("key", "value2", nx=True) 525 | False 526 | >>> cache.get("key") 527 | "value1" 528 | 529 | Also, the ``incr`` and ``decr`` methods use Redis atomic operations when the 530 | value that a key contains is suitable for it. 531 | 532 | Raw client access 533 | ~~~~~~~~~~~~~~~~~ 534 | 535 | In some situations your application requires access to a raw Redis client to 536 | use some advanced features that aren't exposed by the Django cache interface. 537 | To avoid storing another setting for creating a raw connection, django-redis 538 | exposes functions with which you can obtain a raw client reusing the cache 539 | connection string: ``get_redis_connection(alias)``. 540 | 541 | .. code-block:: pycon 542 | 543 | >>> from django_redis import get_redis_connection 544 | >>> con = get_redis_connection("default") 545 | >>> con 546 | 547 | 548 | WARNING: Not all pluggable clients support this feature. 549 | 550 | Connection pools 551 | ~~~~~~~~~~~~~~~~ 552 | 553 | Behind the scenes, django-redis uses the underlying redis-py connection pool 554 | implementation, and exposes a simple way to configure it. Alternatively, you 555 | can directly customize a connection/connection pool creation for a backend. 556 | 557 | The default redis-py behavior is to not close connections, recycling them when 558 | possible. 559 | 560 | Configure default connection pool 561 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 562 | 563 | The default connection pool is simple. For example, you can customize the 564 | maximum number of connections in the pool by setting ``CONNECTION_POOL_KWARGS`` 565 | in the ``CACHES`` setting: 566 | 567 | .. 
code-block:: python 568 | 569 | CACHES = { 570 | "default": { 571 | "BACKEND": "django_redis.cache.RedisCache", 572 | # ... 573 | "OPTIONS": { 574 | "CONNECTION_POOL_KWARGS": {"max_connections": 100} 575 | } 576 | } 577 | } 578 | 579 | You can verify how many connections the pool has opened with the following 580 | snippet: 581 | 582 | .. code-block:: python 583 | 584 | from django_redis import get_redis_connection 585 | 586 | r = get_redis_connection("default") # Use the name you have defined for Redis in settings.CACHES 587 | connection_pool = r.connection_pool 588 | print("Created connections so far: %d" % connection_pool._created_connections) 589 | 590 | Since the default connection pool passes all keyword arguments it doesn't use 591 | to its connections, you can also customize the connections that the pool makes 592 | by adding those options to ``CONNECTION_POOL_KWARGS``: 593 | 594 | .. code-block:: python 595 | 596 | CACHES = { 597 | "default": { 598 | # ... 599 | "OPTIONS": { 600 | "CONNECTION_POOL_KWARGS": {"max_connections": 100, "retry_on_timeout": True} 601 | } 602 | } 603 | } 604 | 605 | Use your own connection pool subclass 606 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 607 | 608 | Sometimes you want to use your own subclass of the connection pool. This is 609 | possible with django-redis using the ``CONNECTION_POOL_CLASS`` parameter in the 610 | backend options. 611 | 612 | .. code-block:: python 613 | 614 | from redis.connection import ConnectionPool 615 | 616 | class MyOwnPool(ConnectionPool): 617 | # Just doing nothing, only for example purpose 618 | pass 619 | 620 | .. code-block:: python 621 | 622 | # Omitting all backend declaration boilerplate code. 
623 | 624 | "OPTIONS": { 625 | "CONNECTION_POOL_CLASS": "myproj.mypool.MyOwnPool", 626 | } 627 | 628 | Customize connection factory 629 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 630 | 631 | If none of the previous methods satisfies you, you can get in the middle of the 632 | django-redis connection factory process and customize or completely rewrite it. 633 | 634 | By default, django-redis creates connections through the 635 | ``django_redis.pool.ConnectionFactory`` class that is specified in the global 636 | Django setting ``DJANGO_REDIS_CONNECTION_FACTORY``. 637 | 638 | .. code-block:: python 639 | 640 | class ConnectionFactory(object): 641 | def get_connection_pool(self, params: dict): 642 | # Given connection parameters in the `params` argument, return new 643 | # connection pool. It should be overwritten if you want do 644 | # something before/after creating the connection pool, or return 645 | # your own connection pool. 646 | pass 647 | 648 | def get_connection(self, params: dict): 649 | # Given connection parameters in the `params` argument, return a 650 | # new connection. It should be overwritten if you want to do 651 | # something before/after creating a new connection. The default 652 | # implementation uses `get_connection_pool` to obtain a pool and 653 | # create a new connection in the newly obtained pool. 654 | pass 655 | 656 | def get_or_create_connection_pool(self, params: dict): 657 | # This is a high layer on top of `get_connection_pool` for 658 | # implementing a cache of created connection pools. It should be 659 | # overwritten if you want change the default behavior. 660 | pass 661 | 662 | def make_connection_params(self, url: str) -> dict: 663 | # The responsibility of this method is to convert basic connection 664 | # parameters and other settings to fully connection pool ready 665 | # connection parameters. 666 | pass 667 | 668 | def connect(self, url: str): 669 | # This is really a public API and entry point for this factory 670 | # class. 
This encapsulates the main logic of creating the 671 | # previously mentioned `params` using `make_connection_params` and 672 | # creating a new connection using the `get_connection` method. 673 | pass 674 | 675 | Use the sentinel connection factory 676 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 677 | 678 | In order to facilitate using `Redis Sentinels`_, django-redis comes with a 679 | built in sentinel connection factory, which creates sentinel connection pools. 680 | In order to enable this functionality you should add the following: 681 | 682 | 683 | .. code-block:: python 684 | 685 | # Enable the alternate connection factory. 686 | DJANGO_REDIS_CONNECTION_FACTORY = 'django_redis.pool.SentinelConnectionFactory' 687 | 688 | # These sentinels are shared between all the examples, and are passed 689 | # directly to redis Sentinel. These can also be defined inline. 690 | SENTINELS = [ 691 | ('sentinel-1', 26379), 692 | ('sentinel-2', 26379), 693 | ('sentinel-3', 26379), 694 | ] 695 | 696 | CACHES = { 697 | "default": { 698 | "BACKEND": "django_redis.cache.RedisCache", 699 | # The hostname in LOCATION is the primary (service / master) name 700 | "LOCATION": "redis://service_name/db", 701 | "OPTIONS": { 702 | # While the default client will work, this will check you 703 | # have configured things correctly, and also create a 704 | # primary and replica pool for the service specified by 705 | # LOCATION rather than requiring two URLs. 706 | "CLIENT_CLASS": "django_redis.client.SentinelClient", 707 | 708 | # Sentinels which are passed directly to redis Sentinel. 709 | "SENTINELS": SENTINELS, 710 | 711 | # kwargs for redis Sentinel (optional). 712 | "SENTINEL_KWARGS": {}, 713 | 714 | # You can still override the connection pool (optional). 715 | "CONNECTION_POOL_CLASS": "redis.sentinel.SentinelConnectionPool", 716 | }, 717 | }, 718 | 719 | # A minimal example using the SentinelClient. 
720 | "minimal": { 721 | "BACKEND": "django_redis.cache.RedisCache", 722 | 723 | # The SentinelClient will use this location for both the primaries 724 | # and replicas. 725 | "LOCATION": "redis://minimal_service_name/db", 726 | 727 | "OPTIONS": { 728 | "CLIENT_CLASS": "django_redis.client.SentinelClient", 729 | "SENTINELS": SENTINELS, 730 | }, 731 | }, 732 | 733 | # A minimal example using the DefaultClient. 734 | "other": { 735 | "BACKEND": "django_redis.cache.RedisCache", 736 | "LOCATION": [ 737 | # The DefaultClient is [primary, replicas...], but with the 738 | # SentinelConnectionPool it only requires one "is_master=0". 739 | "redis://other_service_name/db?is_master=1", 740 | "redis://other_service_name/db?is_master=0", 741 | ], 742 | "OPTIONS": {"SENTINELS": SENTINELS}, 743 | }, 744 | 745 | # A minimal example only using only replicas in read only mode (and 746 | # the DefaultClient). 747 | "readonly": { 748 | "BACKEND": "django_redis.cache.RedisCache", 749 | "LOCATION": "redis://readonly_service_name/db?is_master=0", 750 | "OPTIONS": {"SENTINELS": SENTINELS}, 751 | }, 752 | } 753 | 754 | It is also possible to set some caches as sentinels and some as not: 755 | 756 | .. code-block:: python 757 | 758 | SENTINELS = [ 759 | ('sentinel-1', 26379), 760 | ('sentinel-2', 26379), 761 | ('sentinel-3', 26379), 762 | ] 763 | CACHES = { 764 | "sentinel": { 765 | "BACKEND": "django_redis.cache.RedisCache", 766 | "LOCATION": "redis://service_name/db", 767 | "OPTIONS": { 768 | "CLIENT_CLASS": "django_redis.client.SentinelClient", 769 | "SENTINELS": SENTINELS, 770 | "CONNECTION_POOL_CLASS": "redis.sentinel.SentinelConnectionPool", 771 | "CONNECTION_FACTORY": "django_redis.pool.SentinelConnectionFactory", 772 | }, 773 | }, 774 | "default": { 775 | "BACKEND": "django_redis.cache.RedisCache", 776 | "LOCATION": "redis://127.0.0.1:6379/1", 777 | "OPTIONS": { 778 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 779 | }, 780 | }, 781 | } 782 | 783 | .. 
_Redis Sentinels: https://redis.io/topics/sentinel 784 | 785 | Pluggable parsers 786 | ~~~~~~~~~~~~~~~~~ 787 | 788 | `redis-py`_, the Python Redis client used by django-redis, will automatically 789 | use a C-based parser if the `hiredis`_ package is installed in your environment. 790 | This can provide a significant performance boost for parsing Redis replies. 791 | 792 | To take advantage of this, simply install the `hiredis` package with pip: 793 | 794 | .. code-block:: console 795 | 796 | $ python -m pip install hiredis 797 | 798 | No additional configuration in your Django settings is required. django-redis 799 | will use the faster parser automatically. 800 | 801 | .. _hiredis: https://pypi.org/project/hiredis/ 802 | 803 | Pluggable clients 804 | ~~~~~~~~~~~~~~~~~ 805 | 806 | django-redis is designed for to be very flexible and very configurable. For it, 807 | it exposes a pluggable backends that make easy extend the default behavior, and 808 | it comes with few ones out the box. 809 | 810 | Default client 811 | ^^^^^^^^^^^^^^ 812 | 813 | Almost all about the default client is explained, with one exception: the 814 | default client comes with replication support. 815 | 816 | To connect to a Redis replication setup, you should change the ``LOCATION`` to 817 | something like: 818 | 819 | .. code-block:: python 820 | 821 | "LOCATION": [ 822 | "redis://127.0.0.1:6379/1", 823 | "redis://127.0.0.1:6378/1", 824 | ] 825 | 826 | The first connection string represents the primary server and the rest to 827 | replica servers. 828 | 829 | WARNING: Replication setup is not heavily tested in production environments. 830 | 831 | Shard client 832 | ^^^^^^^^^^^^ 833 | 834 | This pluggable client implements client-side sharding. It inherits almost all 835 | functionality from the default client. To use it, change your cache settings to 836 | something like this: 837 | 838 | .. 
code-block:: python 839 | 840 | CACHES = { 841 | "default": { 842 | "BACKEND": "django_redis.cache.RedisCache", 843 | "LOCATION": [ 844 | "redis://127.0.0.1:6379/1", 845 | "redis://127.0.0.1:6379/2", 846 | ], 847 | "OPTIONS": { 848 | "CLIENT_CLASS": "django_redis.client.ShardClient", 849 | } 850 | } 851 | } 852 | 853 | WARNING: Shard client is still experimental, so be careful when using it in 854 | production environments. 855 | 856 | Herd client 857 | ^^^^^^^^^^^ 858 | 859 | This pluggable client helps dealing with the thundering herd problem. You can read more about it 860 | on link: `Wikipedia `_ 861 | 862 | Like previous pluggable clients, it inherits all functionality from the default client, adding some 863 | additional methods for getting/setting keys. 864 | 865 | .. code-block:: python 866 | 867 | CACHES = { 868 | "default": { 869 | "BACKEND": "django_redis.cache.RedisCache", 870 | "LOCATION": "redis://127.0.0.1:6379/1", 871 | "OPTIONS": { 872 | "CLIENT_CLASS": "django_redis.client.HerdClient", 873 | } 874 | } 875 | } 876 | 877 | This client exposes additional settings: 878 | 879 | - ``CACHE_HERD_TIMEOUT``: Set default herd timeout. (Default value: 60s) 880 | 881 | Pluggable serializer 882 | ~~~~~~~~~~~~~~~~~~~~ 883 | 884 | The pluggable clients serialize data before sending it to the server. By 885 | default, django-redis serializes the data using the Python ``pickle`` module. 886 | This is very flexible and can handle a large range of object types. 887 | 888 | To serialize using JSON instead, the serializer ``JSONSerializer`` is also 889 | available. 890 | 891 | .. 
code-block:: python 892 | 893 | CACHES = { 894 | "default": { 895 | "BACKEND": "django_redis.cache.RedisCache", 896 | "LOCATION": "redis://127.0.0.1:6379/1", 897 | "OPTIONS": { 898 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 899 | "SERIALIZER": "django_redis.serializers.json.JSONSerializer", 900 | } 901 | } 902 | } 903 | 904 | There's also support for serialization using `MsgPack`_ (that requires the 905 | msgpack library): 906 | 907 | .. code-block:: python 908 | 909 | CACHES = { 910 | "default": { 911 | "BACKEND": "django_redis.cache.RedisCache", 912 | "LOCATION": "redis://127.0.0.1:6379/1", 913 | "OPTIONS": { 914 | "CLIENT_CLASS": "django_redis.client.DefaultClient", 915 | "SERIALIZER": "django_redis.serializers.msgpack.MSGPackSerializer", 916 | } 917 | } 918 | } 919 | 920 | .. _MsgPack: https://msgpack.org/ 921 | 922 | Pluggable Redis client 923 | ~~~~~~~~~~~~~~~~~~~~~~ 924 | 925 | django-redis uses the Redis client ``redis.client.StrictClient`` by default. It 926 | is possible to use an alternative client. 927 | 928 | You can customize the client used by setting ``REDIS_CLIENT_CLASS`` in the 929 | ``CACHES`` setting. Optionally, you can provide arguments to this class by 930 | setting ``REDIS_CLIENT_KWARGS``. 931 | 932 | .. code-block:: python 933 | 934 | CACHES = { 935 | "default": { 936 | "OPTIONS": { 937 | "REDIS_CLIENT_CLASS": "my.module.ClientClass", 938 | "REDIS_CLIENT_KWARGS": {"some_setting": True}, 939 | } 940 | } 941 | } 942 | 943 | 944 | Closing Connections 945 | ~~~~~~~~~~~~~~~~~~~ 946 | 947 | The default django-redis behavior on close() is to keep the connections to Redis server. 948 | 949 | You can change this default behaviour for all caches by the ``DJANGO_REDIS_CLOSE_CONNECTION = True`` 950 | in the django settings (globally) or (at cache level) by setting ``CLOSE_CONNECTION: True`` in the ``OPTIONS`` 951 | for each configured cache. 
Setting it to ``True`` will instruct django-redis to close all connections (since v. 4.12.2), irrespective of their current usage.
1009 | 1010 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS`` AND ANY EXPRESS OR 1011 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 1012 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 1013 | IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 1014 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 1015 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 1016 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 1017 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 1018 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 1019 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 1020 | --------------------------------------------------------------------------------