├── tests ├── redis_backend_testapp │ ├── models.py │ ├── __init__.py │ └── tests.py ├── urls.py ├── hashring_test │ ├── __init__.py │ ├── models.py │ └── tests.py ├── README.txt ├── runtests-sentinel.py ├── run_sentinel_tests.sh └── test_sqlite_sentinel.py ├── django_redis_sentinel ├── client │ ├── __init__.py │ └── sentinel.py ├── __init__.py ├── cache.py └── pool.py ├── .dockerignore ├── bash_container.sh ├── MANIFEST.in ├── run_docker_tests.sh ├── entrypoint.sh ├── AUTHORS.md ├── CHANGES.txt ├── sentinel ├── sentinel.conf ├── Dockerfile ├── entrypoint.sh └── test_sentinel.sh ├── .gitignore ├── Dockerfile ├── .travis.yml ├── LICENSE ├── setup.py ├── docker-compose.yaml └── README.md /tests/redis_backend_testapp/models.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/redis_backend_testapp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/urls.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | -------------------------------------------------------------------------------- /tests/hashring_test/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | -------------------------------------------------------------------------------- /tests/hashring_test/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | -------------------------------------------------------------------------------- /django_redis_sentinel/client/__init__.py: -------------------------------------------------------------------------------- 1 | from .sentinel import SentinelClient 2 | 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | .dockerignore 3 | Dockerfile 4 | docker-compose.yaml 5 | -------------------------------------------------------------------------------- /django_redis_sentinel/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | VERSION = (0, 2, 0) 4 | __version__ = '.'.join(map(str, VERSION)) 5 | -------------------------------------------------------------------------------- /bash_container.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | DJANGO_REDIS_CONTAINER=`docker ps -aqf "name=django-redis-sentinel"` 3 | docker exec -it $DJANGO_REDIS_CONTAINER bash -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include AUTHORS.rst 3 | include README.rst 4 | recursive-include tests README.txt *.py 5 | recursive-include doc Makefile *.adoc *.html 6 | -------------------------------------------------------------------------------- /run_docker_tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | DJANGO_REDIS_CONTAINER=`docker ps -aqf "name=django-redis-sentinel"` 4 | docker exec -t 
$DJANGO_REDIS_CONTAINER bash /django-redis/tests/run_sentinel_tests.sh 5 | -------------------------------------------------------------------------------- /tests/README.txt: -------------------------------------------------------------------------------- 1 | Test requirements 2 | ----------------- 3 | 4 | python packages 5 | ~~~~~~~~~~~~~~~ 6 | 7 | the following packages can be installed with pip 8 | 9 | * docker 10 | * docker-compose 11 | * django-redis 12 | -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | #/usr/bin/tail -f /dev/null # keeps container running 4 | 5 | # Ends up with SSH for development purposes. Comment the line if you don't want to keep it running afters tests 6 | /usr/sbin/sshd -D 7 | -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | django-redis-sentinel-redux 2 | ===================== 3 | Dani Gonzalez / danigosa 4 | 5 | django-redis 6 | ============ 7 | 8 | https://github.com/niwinz/django-redis/blob/master/AUTHORS.rst 9 | 10 | -------------------------------------------------------------------------------- /CHANGES.txt: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | Version 0.2.0 5 | ------------- 6 | 7 | Date: 2016-10-22 8 | 9 | - Added automated CI testing 10 | 11 | Version 0.1.0 12 | ------------- 13 | 14 | Date: 2016-10-21 15 | 16 | - Support for django-redis 4.5.0 17 | -------------------------------------------------------------------------------- /sentinel/sentinel.conf: -------------------------------------------------------------------------------- 1 | port 26379 2 | 3 | dir /tmp 4 | 5 | sentinel monitor rmaster redis-master 6379 $SENTINEL_QUORUM 6 | 7 | sentinel down-after-milliseconds rmaster $SENTINEL_DOWN_AFTER 8 | 9 | sentinel parallel-syncs rmaster 1 10 | 11 | sentinel failover-timeout rmaster $SENTINEL_FAILOVER 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[c|o] 2 | .DS_Store 3 | *.sql 4 | *.bz2 5 | *~ 6 | *.log 7 | *.json 8 | *.wsgi 9 | local_settings.py 10 | development_settings.py 11 | *.egg-info 12 | .project 13 | .pydevproject 14 | .settings 15 | versiontools* 16 | _build* 17 | doc/index.html 18 | dist* 19 | *.swp 20 | \#* 21 | .\#* 22 | .tox 23 | dump.rdb 24 | .idea 25 | -------------------------------------------------------------------------------- /sentinel/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM redis 2 | 3 | MAINTAINER danigosa 4 | 5 | EXPOSE 26379 6 | ADD sentinel.conf /etc/redis/sentinel.conf 7 | RUN chown redis:redis /etc/redis/sentinel.conf 8 | ENV SENTINEL_QUORUM 2 9 | ENV SENTINEL_DOWN_AFTER 5000 10 | ENV SENTINEL_FAILOVER 180000 11 | COPY entrypoint.sh / 12 | ENTRYPOINT ["/entrypoint.sh"] -------------------------------------------------------------------------------- /sentinel/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | sed -i "s/\$SENTINEL_QUORUM/$SENTINEL_QUORUM/g" /etc/redis/sentinel.conf 3 | sed -i "s/\$SENTINEL_DOWN_AFTER/$SENTINEL_DOWN_AFTER/g" /etc/redis/sentinel.conf 4 | sed -i 
"s/\$SENTINEL_FAILOVER/$SENTINEL_FAILOVER/g" /etc/redis/sentinel.conf 5 | 6 | exec docker-entrypoint.sh redis-server /etc/redis/sentinel.conf --sentinel 7 | -------------------------------------------------------------------------------- /tests/runtests-sentinel.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | import sys 5 | 6 | sys.path.insert(0, "..") 7 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_sqlite_sentinel") 8 | 9 | if __name__ == "__main__": 10 | from django.core.management import execute_from_command_line 11 | 12 | args = sys.argv 13 | 14 | args.insert(1, "test") 15 | if len(args) == 2: 16 | args.insert(2, "redis_backend_testapp") 17 | args.insert(3, "hashring_test") 18 | 19 | execute_from_command_line(args) 20 | -------------------------------------------------------------------------------- /django_redis_sentinel/cache.py: -------------------------------------------------------------------------------- 1 | from django_redis.cache import RedisCache, DJANGO_REDIS_IGNORE_EXCEPTIONS 2 | from django_redis.util import load_class 3 | 4 | 5 | class RedisSentinelCache(RedisCache): 6 | """ 7 | Forces SentinelClient instead of DefaultClient 8 | """ 9 | def __init__(self, server, params): 10 | super(RedisCache, self).__init__(params) 11 | self._server = server 12 | self._params = params 13 | 14 | options = params.get("OPTIONS", {}) 15 | self._client_cls = options.get("CLIENT_CLASS", "django_redis_sentinel.client.SentinelClient") 16 | self._client_cls = load_class(self._client_cls) 17 | self._client = None 18 | 19 | self._ignore_exceptions = options.get("IGNORE_EXCEPTIONS", DJANGO_REDIS_IGNORE_EXCEPTIONS) 20 | -------------------------------------------------------------------------------- /tests/run_sentinel_tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | cd /django-redis-sentinel/tests/ 3 | 4 | # Django 18 5 | echo "<<<< Testing Django18 >>>" 6 | alias python=python 7 | echo "Switched to PY2K" 8 | pip install 'Django>=1.8,<1.9' 9 | python runtests-sentinel.py 10 | alias python="python3" 11 | echo "Switched to PY3K" 12 | python runtests-sentinel.py 13 | 14 | # Django 19 15 | echo "<<<< Testing Django19 >>>" 16 | alias python=python 17 | echo "Switched to PY2K" 18 | pip install -U 'Django>=1.9,<1.10' 19 | python runtests-sentinel.py 20 | alias python=python3 21 | echo "Switched to PY3K" 22 | python runtests-sentinel.py 23 | 24 | # Django 110 25 | echo "<<<< Testing Django110 >>>" 26 | alias python=python 27 | echo "Switched to PY2K" 28 | pip install -U 'Django>=1.10,<1.11' 29 | python runtests-sentinel.py 30 | alias python=python3 31 | echo "Switched to PY3K" 32 | python runtests-sentinel.py 33 | 34 | alias python=python 35 | echo "Switched to PY2K" 36 | echo "End of testings PY2K and PY3K, Django>=1.8 up to Django<=1.10" -------------------------------------------------------------------------------- /tests/hashring_test/tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from django.test import TestCase 4 | 5 | from django_redis.hash_ring import HashRing 6 | 7 | 8 | class Node(object): 9 | def __init__(self, id): 10 | self.id = id 11 | 12 | def __str__(self): 13 | return "node:{0}".format(self.id) 14 | 15 | def __repr__(self): 16 | return "".format(self.id) 17 | 18 | 19 | class HashRingTest(TestCase): 20 | def setUp(self): 21 | self.node0 = Node(0) 
22 | self.node1 = Node(1) 23 | self.node2 = Node(2) 24 | 25 | self.nodes = [self.node0, self.node1, self.node2] 26 | self.ring = HashRing(self.nodes) 27 | 28 | def test_hashring(self): 29 | ids = [] 30 | 31 | for key in ["test{0}".format(x) for x in range(10)]: 32 | node = self.ring.get_node(key) 33 | ids.append(node.id) 34 | 35 | self.assertEqual(ids, [0, 2, 1, 2, 2, 2, 2, 0, 1, 1]) 36 | 37 | def test_hashring_brute_force(self): 38 | for key in ("test{0}".format(x) for x in range(10000)): 39 | node = self.ring.get_node(key) 40 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | 3 | MAINTAINER danigosa 4 | 5 | RUN apt-get update && apt-get install -y -qq \ 6 | python-pip \ 7 | openssh-server 8 | 9 | RUN pip install --upgrade pip 10 | 11 | RUN pip install 'django-redis>=4.5.0' 12 | RUN pip install hiredis mock msgpack-python fakeredis 13 | 14 | RUN mkdir /django-redis-sentinel 15 | 16 | COPY . /django-redis-sentinel 17 | WORKDIR /django-redis-sentinel 18 | VOLUME /django-redis-sentinel 19 | COPY entrypoint.sh / 20 | 21 | # Enable OpenSSH for remote interpreters like pydev or Pycharm 22 | # Expose SSH for development purposes 23 | RUN mkdir /var/run/sshd 24 | RUN echo 'root:screencast' | chpasswd 25 | RUN sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config 26 | RUN sed -i 's/prohibit-password/yes/' /etc/ssh/sshd_config 27 | 28 | # SSH login fix. Otherwise user is kicked off after login 29 | RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd 30 | 31 | ENV NOTVISIBLE "in users profile" 32 | RUN echo "export VISIBLE=now" >> /etc/profile 33 | 34 | EXPOSE 22 35 | 36 | ENTRYPOINT ["/entrypoint.sh"] 37 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | dist: trusty 3 | 4 | services: 5 | - docker 6 | 7 | env: 8 | global: 9 | - DOCKER_VERSION=1.12.2-0~trusty 10 | - DOCKER_COMPOSE_VERSION=1.8.0 11 | 12 | before_install: 13 | # list docker-engine versions 14 | - apt-cache madison docker-engine 15 | 16 | # upgrade docker-engine to specific version 17 | - sudo apt-get -o Dpkg::Options::="--force-confnew" install -y docker-engine=${DOCKER_VERSION} 18 | 19 | # reinstall docker-compose at specific version 20 | - sudo rm -f /usr/local/bin/docker-compose 21 | - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose 22 | - chmod +x docker-compose 23 | - sudo mv docker-compose /usr/local/bin 24 | 25 | before_script: 26 | - docker-compose up --build -d 27 | 28 | script: 29 | - ./run_docker_tests.sh 30 | 31 | after_script: 32 | - docker-compose down 33 | 34 | notifications: 35 | email: 36 | recipients: 37 | - danigosa@gmail.com 38 | - dani@robbie.ai 39 | - dani@infantium.com 40 | on_success: change 41 | on_failure: change 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011-2016 Andrey Antukh 2 | Copyright (c) 2011 Sean Bleier 3 | 4 | All rights reserved. 
5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions 8 | are met: 9 | 1. Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 3. The name of the author may not be used to endorse or promote products 15 | derived from this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 18 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 19 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 20 | IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 21 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 22 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 26 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | from django_redis_sentinel import __version__ 4 | 5 | description = """ 6 | Full featured redis cache backend for Django for Sentinel Redis Clusters. 7 | """ 8 | 9 | setup( 10 | name="django-redis-sentinel-redux", 11 | url="https://github.com/danigosa/django-redis-sentinel-redux", 12 | author="Dani Gonzalez @danigosa", 13 | author_email="danigosa@gmail.com", 14 | version=__version__, 15 | packages=[ 16 | "django_redis_sentinel", 17 | "django_redis_sentinel.client" 18 | ], 19 | description=description.strip(), 20 | install_requires=[ 21 | "django-redis>=4.5.0", 22 | ], 23 | zip_safe=False, 24 | classifiers=[ 25 | "Development Status :: 4 - Beta", 26 | "Environment :: Web Environment", 27 | "Framework :: Django :: 1.8", 28 | "Framework :: Django :: 1.9", 29 | "Framework :: Django :: 1.10", 30 | "Framework :: Django", 31 | "Intended Audience :: Developers", 32 | "License :: OSI Approved :: BSD License", 33 | "Operating System :: OS Independent", 34 | "Programming Language :: Python", 35 | "Programming Language :: Python :: 2", 36 | "Programming Language :: Python :: 2.7", 37 | "Programming Language :: Python :: 3", 38 | "Programming Language :: Python :: 3.4", 39 | "Programming Language :: Python :: 3.5", 40 | "Topic :: Software Development :: Libraries", 41 | "Topic :: Utilities", 42 | ], 43 | ) 44 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | django-redis: 4 | container_name: django-redis-sentinel 5 | build: . 
6 | volumes: 7 | - ./:/django-redis 8 | depends_on: 9 | - redis-master 10 | - redis-slave1 11 | - redis-slave2 12 | - sentinel1 13 | - sentinel2 14 | - sentinel3 15 | ports: 16 | - "2005:22" 17 | sentinel1: 18 | container_name: sentinel1 19 | build: sentinel 20 | environment: 21 | - SENTINEL_DOWN_AFTER=5000 22 | - SENTINEL_FAILOVER=180000 23 | links: 24 | - redis-master 25 | - redis-slave1 26 | - redis-slave2 27 | sentinel2: 28 | container_name: sentinel2 29 | build: sentinel 30 | environment: 31 | - SENTINEL_DOWN_AFTER=5000 32 | - SENTINEL_FAILOVER=180000 33 | links: 34 | - redis-master 35 | - redis-slave1 36 | - redis-slave2 37 | sentinel3: 38 | container_name: sentinel3 39 | build: sentinel 40 | environment: 41 | - SENTINEL_DOWN_AFTER=5000 42 | - SENTINEL_FAILOVER=180000 43 | links: 44 | - redis-master 45 | - redis-slave1 46 | - redis-slave2 47 | redis-master: 48 | container_name: redis-master 49 | image: redis 50 | redis-slave1: 51 | container_name: redis-slave1 52 | image: redis 53 | command: redis-server --slaveof redis-master 6379 54 | links: 55 | - redis-master 56 | redis-slave2: 57 | container_name: redis-slave2 58 | image: redis 59 | command: redis-server --slaveof redis-master 6379 60 | links: 61 | - redis-master 62 | -------------------------------------------------------------------------------- /sentinel/test_sentinel.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | MASTER_IP=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' redis-master) 3 | SLAVE_IP=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' redis-slave) 4 | SENTINEL_IP=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' djangoredis_sentinel_1) 5 | 6 | echo Redis master: $MASTER_IP 7 | echo Redis Slave: $SLAVE_IP 8 | echo Redis Sentinel: $SENTINEL_IP 9 | 10 | echo ------------------------------------------------ 11 | echo Initial status of sentinel 12 | echo ------------------------------------------------ 13 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 info Sentinel 14 | echo Current master is 15 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 SENTINEL get-master-addr-by-name rmaster 16 | echo Current slaves are 17 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 SENTINEL slaves rmaster 18 | echo ------------------------------------------------ 19 | 20 | echo Stop redis master 21 | docker pause redis-master 22 | echo Wait for 25 seconds 23 | sleep 25 24 | echo Current infomation of sentinel 25 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 info Sentinel 26 | echo Current master is 27 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 SENTINEL get-master-addr-by-name rmaster 28 | echo Current slaves are 29 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 SENTINEL slaves rmaster 30 | 31 | echo ------------------------------------------------ 32 | echo Restart Redis master 33 | docker unpause redis-master 34 | sleep 5 35 | echo Current infomation of sentinel 36 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 info Sentinel 37 | echo Current master is 38 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 SENTINEL get-master-addr-by-name rmaster 39 | echo Current slaves are 40 | docker exec djangoredis_sentinel_1 redis-cli -p 26379 SENTINEL slaves rmaster -------------------------------------------------------------------------------- /tests/test_sqlite_sentinel.py: -------------------------------------------------------------------------------- 1 | DATABASES = { 2 | 
"default": { 3 | "ENGINE": "django.db.backends.sqlite3" 4 | }, 5 | } 6 | 7 | SECRET_KEY = "django_tests_secret_key" 8 | TIME_ZONE = "America/Chicago" 9 | LANGUAGE_CODE = "en-us" 10 | ADMIN_MEDIA_PREFIX = "/static/admin/" 11 | STATICFILES_DIRS = () 12 | 13 | MIDDLEWARE_CLASSES = [] 14 | 15 | CACHES = { 16 | "default": { 17 | "BACKEND": "django_redis_sentinel.cache.RedisSentinelCache", 18 | "LOCATION": [ 19 | ("sentinel1", 26379), 20 | ("sentinel2", 26379), 21 | ("sentinel3", 26379) 22 | ], 23 | "OPTIONS": { 24 | "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient", 25 | "SENTINEL_SERVICE_NAME": "rmaster", 26 | "REDIS_CLIENT_KWARGS": { 27 | "db": 1 28 | } 29 | } 30 | }, 31 | "doesnotexist": { 32 | "BACKEND": "django_redis_sentinel.cache.RedisSentinelCache", 33 | "LOCATION": [ 34 | ("sentinel1", 26379), 35 | ("sentinel2", 26379), 36 | ("sentinel3", 26379) 37 | ], 38 | "OPTIONS": { 39 | "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient", 40 | "SENTINEL_SERVICE_NAME": "rmaster", 41 | "REDIS_CLIENT_KWARGS": { 42 | "db": 1 43 | } 44 | } 45 | }, 46 | "sample": { 47 | "BACKEND": "django_redis_sentinel.cache.RedisSentinelCache", 48 | "LOCATION": [ 49 | ("sentinel1", 26379), 50 | ("sentinel2", 26379), 51 | ("sentinel3", 26379) 52 | ], 53 | "OPTIONS": { 54 | "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient", 55 | "SENTINEL_SERVICE_NAME": "rmaster", 56 | "REDIS_CLIENT_KWARGS": { 57 | "db": 1 58 | } 59 | } 60 | }, 61 | "with_prefix": { 62 | "BACKEND": "django_redis_sentinel.cache.RedisSentinelCache", 63 | "LOCATION": [ 64 | ("sentinel1", 26379), 65 | ("sentinel2", 26379), 66 | ("sentinel3", 26379) 67 | ], 68 | "OPTIONS": { 69 | "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient", 70 | "SENTINEL_SERVICE_NAME": "rmaster", 71 | "REDIS_CLIENT_KWARGS": { 72 | "db": 1 73 | } 74 | }, 75 | "KEY_PREFIX": "test-prefix", 76 | }, 77 | } 78 | 79 | INSTALLED_APPS = ( 80 | "django.contrib.sessions", 81 | "redis_backend_testapp", 82 | "hashring_test", 83 | ) 84 | -------------------------------------------------------------------------------- /django_redis_sentinel/pool.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from django.core.exceptions import ImproperlyConfigured 3 | from django_redis import util as djredis_util 4 | from django_redis.pool import ConnectionFactory 5 | from redis.sentinel import Sentinel 6 | 7 | 8 | def get_connection_factory(path=None, options=None): 9 | if path is None: 10 | path = getattr(settings, "DJANGO_REDIS_CONNECTION_FACTORY", 11 | "django_redis_sentinel.pool.SentinelConnectionFactory") 12 | 13 | cls = djredis_util.load_class(path) 14 | return cls(options or {}) 15 | 16 | 17 | class SentinelConnectionFactory(ConnectionFactory): 18 | # Creates Sentinel Client from connection params 19 | # It does not cache anything 20 | def __init__(self, options): 21 | super(SentinelConnectionFactory, self).__init__(options) 22 | 23 | pool_cls_path = options.get("CONNECTION_POOL_CLASS", 24 | "redis.sentinel.SentinelConnectionPool") 25 | self.pool_cls = djredis_util.load_class(pool_cls_path) 26 | self.pool_cls_kwargs = options.get("CONNECTION_POOL_KWARGS", {}) 27 | 28 | redis_client_cls_path = options.get("REDIS_CLIENT_CLASS", 29 | "redis.client.StrictRedis") 30 | self.redis_client_cls = djredis_util.load_class(redis_client_cls_path) 31 | self.redis_client_cls_kwargs = options.get("REDIS_CLIENT_KWARGS", {}) 32 | 33 | self.service_name = options.get("SENTINEL_SERVICE_NAME", None) 34 | if 
not self.service_name: 35 | raise ImproperlyConfigured("SentinelClient requires SENTINEL_SERVICE_NAME in OPTIONS") 36 | 37 | # Get sentinels servers from options (even though it's not an option...) 38 | self.sentinels = options.get("SENTINELS", []) 39 | 40 | self.options = options 41 | 42 | # Sentinel Connection Pool is not cached, not indexed by URL, so params are constant for each connection 43 | self.sentinel_conn_pool_cls_kwargs = { 44 | "parser_class": self.get_parser_cls(), 45 | } 46 | 47 | password = self.options.get("PASSWORD", None) 48 | if password: 49 | self.sentinel_conn_pool_cls_kwargs["password"] = password 50 | 51 | socket_timeout = self.options.get("SOCKET_TIMEOUT", None) 52 | if socket_timeout: 53 | assert isinstance(socket_timeout, (int, float)), \ 54 | "Socket timeout should be float or integer" 55 | self.sentinel_conn_pool_cls_kwargs["socket_timeout"] = socket_timeout 56 | 57 | socket_connect_timeout = self.options.get("SOCKET_CONNECT_TIMEOUT", None) 58 | if socket_connect_timeout: 59 | assert isinstance(socket_connect_timeout, (int, float)), \ 60 | "Socket connect timeout should be float or integer" 61 | self.sentinel_conn_pool_cls_kwargs["socket_connect_timeout"] = socket_connect_timeout 62 | 63 | # Actual Sentinel client, it is responsible of creating the StrictRedis clients 64 | self._sentinel = Sentinel(self.sentinels, **self.sentinel_conn_pool_cls_kwargs) 65 | self._has_slaves = len(self._sentinel.discover_slaves(self.service_name)) # Returns a list of current slaves 66 | 67 | def has_slaves(self): 68 | return self._has_slaves 69 | 70 | def connect_master(self): 71 | """ 72 | Given a basic connection parameters and sentinel client, 73 | return a new master connection. 74 | :raises MasterNotFoundError: if no master available 75 | then raises this 76 | """ 77 | return self._sentinel.master_for(self.service_name, **self.redis_client_cls_kwargs) 78 | 79 | def connect_slave(self, force_slave=False): 80 | """ 81 | Given a basic connection parameters and sentinel client, 82 | return a new slave connection if available, master's if not 83 | :raises SlaveNotFoundError: it automatically fallback to master if not slaves available, if nobody available 84 | then raises this 85 | """ 86 | if self.has_slaves() or force_slave: 87 | return self._sentinel.slave_for(self.service_name, **self.redis_client_cls_kwargs) 88 | else: 89 | # If the cluster had no slaves when creating the pool 90 | # then no need for callbacks and unnecessary discoveries, fall back 91 | # to master directly 92 | return self.connect_master() 93 | -------------------------------------------------------------------------------- /django_redis_sentinel/client/sentinel.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals 4 | 5 | import socket 6 | 7 | from django.core.cache.backends.base import get_key_func 8 | from django.core.exceptions import ImproperlyConfigured 9 | from django_redis.client.default import DefaultClient 10 | from django_redis.util import load_class 11 | from redis.exceptions import ConnectionError 12 | 13 | from django_redis_sentinel import pool 14 | 15 | try: 16 | from redis.exceptions import TimeoutError, ResponseError 17 | 18 | _main_exceptions = (TimeoutError, ResponseError, ConnectionError, socket.timeout) 19 | except ImportError: 20 | _main_exceptions = (ConnectionError, socket.timeout) 21 | 22 | 23 | class SentinelClient(DefaultClient): 24 | """ 25 | Modifies 
DefaultClient to work on Sentinel Cluster. URLs passed as servers are no longer master on index 0 and 26 | slaves the following ones. All URLs should represent the list of sentinels, where order no matters anymore. 27 | It does not use any cached ConnectionPool as SentinelConnectionPool is for sentinels, not master and slaves. 28 | The Sentinel client creates a StrictRedis client that performs the connections to actual current elected master or 29 | slave instances, instead of indexing the URLs for using them as a fixed way to connect to each server. 30 | This way, through Sentinel client instead of direct creation of StrictRedis from URLs, 31 | we always have a valid master or slave client (before or after failover). 32 | New OPTIONS: 33 | - SENTINEL_SERVICE_NAME (required): Name of monitored cluster 34 | - SENTINEL_SOCKET_TIMEOUT (optional): Socket timeout for connecting to sentinels, in seconds (accepts float) 35 | """ 36 | 37 | def __init__(self, server, params, backend): 38 | super(SentinelClient, self).__init__(server, params, backend) 39 | 40 | self._backend = backend 41 | self._server = server 42 | self._params = params 43 | 44 | self.reverse_key = get_key_func(params.get("REVERSE_KEY_FUNCTION") or 45 | "django_redis.util.default_reverse_key") 46 | 47 | if not self._server: 48 | raise ImproperlyConfigured("Missing connections string") 49 | 50 | if not isinstance(self._server, (list, tuple, set)): 51 | self._server = self._server.split(",") 52 | 53 | self._options = params.get("OPTIONS", {}) 54 | 55 | serializer_path = self._options.get("SERIALIZER", "django_redis.serializers.pickle.PickleSerializer") 56 | serializer_cls = load_class(serializer_path) 57 | 58 | compressor_path = self._options.get("COMPRESSOR", "django_redis.compressors.identity.IdentityCompressor") 59 | compressor_cls = load_class(compressor_path) 60 | 61 | self._serializer = serializer_cls(options=self._options) 62 | self._compressor = compressor_cls(options=self._options) 63 | 64 | # Hack: Add sentinels servers as options, to break legacy pool code as less as possible 65 | self._options.update({"SENTINELS": self._server}) 66 | # Create connection factory for Sentinels 67 | self.connection_factory = pool.get_connection_factory(options=self._options) 68 | 69 | def get_client(self, write=True, force_slave=False): 70 | """ 71 | Method used for obtain a raw redis client. 72 | 73 | This function is used by almost all cache backend 74 | operations for obtain a native redis client/connection 75 | instance. 76 | 77 | If read always looks for a slave (round-robin algorithm, with fallback to master if none available) 78 | If write then it looks for master 79 | """ 80 | if write: 81 | return self.connect(master=True) 82 | else: 83 | return self.connect(master=False, force_slave=force_slave) 84 | 85 | def connect(self, master=True, force_slave=False): 86 | """ 87 | Given a type of connection master or no master, returns a new raw redis client/connection 88 | instance. Sentinel always give a valid StrictRedis client with fallback to master in case of no slaves. 89 | No caching done with clients. 90 | Even though it can be an improvement, it could lead to stale invalid clients in failovers. Maybe in the future. 
91 | """ 92 | if master: 93 | return self.connection_factory.connect_master() 94 | else: 95 | return self.connection_factory.connect_slave(force_slave) 96 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Django-Redis Client that supports Sentinel Cluster HA 2 | 3 | ![CI Status](https://travis-ci.org/danigosa/django-redis-sentinel-redux.svg?branch=master) [![PyPI version](https://badge.fury.io/py/django-redis-sentinel-redux.svg)](https://badge.fury.io/py/django-redis-sentinel-redux) 4 | 5 | Extension for the package django-redis() to fully support Redis Sentinel Clusters. This enables having django-redis as an HA Store or Cache in production environments that require HA. For more information about Redis Sentinel HA capabilities visit: http://redis.io/topics/sentinel 6 | 7 | ##### Note: 8 | 9 | There is already a django-redis-sentinel plugin repository , which intends the same of this one. The main differences and motivations to write a redux version were: 10 | 11 | - Lack of support and continuity, as last commits were 1 year ago and most of the project 1 year ago 12 | - Lack of proper testing, as tests were just mocked and not using a real Redis Sentinel cluster, as the current does through **docker-compose** 13 | - Full set of tests equivalent to the original package **django-redis**, making it a truly drop-in client in the place of the *django_redis.client.DefaultClient*, everything working on previous **django-redis** with single Redis should keep working identically. 14 | - The approach discovering the the urls of the master and slaves and pool connections from them was not the best in my opinion, as it would end up with stale pooled connections to wrong servers once *failover* occurs. Instead, I give instructions on how to test and see how operates during failover in this manual, which is robust as it does not pool connections based on fixed urls, but trying to discover master and slave in each connection. 15 | 16 | 17 | Tested on: 18 | 19 | * Python 2.7.10+ 20 | * Python 3.5+ (should work on 3.4.x) 21 | * django-redis>=4.5.0 22 | * Redis 3.2+ 23 | * Django>=1.8 (latest) 24 | * Django>=1.9 (latest) 25 | * Django>=1.10 (latest) 26 | 27 | The newly client does the following: 28 | 29 | - Connects to a bunch or single Sentinel server using ``redis.sentinel.Sentinel`` client 30 | - Discovers master 31 | - Send writes to ``Sentinel.get_master`` StrictRedis wrapped to work as a ``django_redis.client. DefaultClient`` 32 | - Send reads to ``Sentinel.get_slave`` StrictRedis wrapped to work as a ``django_redis.client. DefaultClient``, falling back to master if no slaves available for reading 33 | 34 | ##### Nice to have / Future 35 | 36 | - Current version does not cache clients as ``django_redis`` does, not master not slaves 37 | - Every access means a request to Sentinel for current master or available slave. 
While this can mean slower results and bigger delay it ensures cache requests are the most available possible, as it's primary mission is to fulfill HA requirements 38 | - Future versions might locally cache clients and only request for new masters and slaves servers from ``get_master`` and ``get_slaves`` methods, when MasterNotFound or SlaveNotFound or other error comes from a **failover** process 39 | - Add Connection Pooling to cached clients can also improve performance, but the risk to have many clients and lots of connections stale after a failover process should be considered 40 | - Support for more Clients, not just StrictRedis wrapped as DefaultClient 41 | 42 | ## How to install 43 | 44 | You can install it with: ``pip install django-redis-sentinel`` 45 | 46 | ## How to use it 47 | 48 | Just plug ``django_redis_sentinel.cache.RedisSentinelCache`` and ``django_redis_sentinel.client.SentinelClient`` into the **django-redis** backend and client configuration like this: 49 | 50 | CACHES = { 51 | "default": { 52 | "BACKEND": "django_redis_sentinel.cache.RedisSentinelCache", 53 | "LOCATION": [ 54 | ("sentinel1", 26379), 55 | ("sentinel2", 26379), 56 | ("sentinel3", 26379) 57 | ], 58 | "OPTIONS": { 59 | "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient", 60 | "SENTINEL_SERVICE_NAME": "rmaster", 61 | "REDIS_CLIENT_KWARGS": { 62 | "db": 1 63 | } 64 | } 65 | } 66 | } 67 | 68 | Notice that the format of *sentinels* in in ``tuple(host, port)`` form. If you have a single sentinel or a service pointing to the sentinels (like a load balancer or k8s Service etc.) still use this format: 69 | 70 | "LOCATION": [ 71 | ("sentinels_service", 26379), 72 | ], 73 | 74 | All the settings and parameters for the ``django_redis.client.DefaultClient`` and ``redis.StrictRedis`` still work but they should be passed as a separated parameter in **OPTIONS** ``REDIS_CLIENT_KWARGS`` like the **database you want to connect to**: 75 | 76 | "OPTIONS": { 77 | "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient", 78 | "SENTINEL_SERVICE_NAME": "rmaster", 79 | "REDIS_CLIENT_KWARGS": { 80 | "db": 1 81 | } 82 | } 83 | 84 | Other parameters for the Sentinel connection: 85 | 86 | "OPTIONS": { 87 | "SOCKET_TIMEOUT: 0.1, 88 | "SOCKET_CONNECT_TIMEOUT: 0.1 89 | } 90 | 91 | From now on you can use django-redis 92 | 93 | ## Running Tests 94 | 95 | $ docker-compose build 96 | $ docker-compose up -d 97 | $ ./run_docker_tests.sh 98 | 99 | ## Testing Failover 100 | 101 | Running tests after a failover: 102 | 103 | ---------------------- 104 | ## In a first terminal 105 | $ docker-compose build 106 | $ docker-compose up -d 107 | $ ./bash-container 108 | root@7809ac6b537b:/django-redis-sentinel# cd tests/ 109 | root@7809ac6b537b:/django-redis-sentinel# ./run_sentinel_tests.sh 110 | ---------------------- 111 | ## In a second terminal, not closing first 112 | $ docker pause redis-master 113 | ---------------------- 114 | ## Wait for 5 seconds and run again first terminal tests 115 | 116 | Running Redis Sentinel Cluster low-level testing: 117 | 118 | $ docker-compose build 119 | $ docker-compose up -d 120 | $ cd sentinel/ 121 | $ ./test_sentinel.sh 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | ## Changelog 131 | 132 | #### Version 0.2.0 133 | 134 | Date: 2016-10-22 135 | 136 | - Added automated CI testing 137 | 138 | #### Version 0.1.0 139 | 140 | Date: 2016-10-21 141 | 142 | - Support for django-redis 4.5.0 143 | -------------------------------------------------------------------------------- 
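The read/write routing the README describes can be sketched outside Django with plain `redis-py`. This is only an illustration of the pattern that `SentinelConnectionFactory` and `SentinelClient` follow, not part of the package itself; it assumes the same sentinel hosts, service name `rmaster` and database `1` used in `tests/test_sqlite_sentinel.py`:

    from redis.sentinel import Sentinel

    # The same sentinel endpoints given in LOCATION, as (host, port) tuples.
    sentinels = [("sentinel1", 26379), ("sentinel2", 26379), ("sentinel3", 26379)]
    sentinel = Sentinel(sentinels, socket_timeout=0.1)

    # Writes always go to the currently elected master.
    master = sentinel.master_for("rmaster", db=1)
    master.set("foo", "bar")

    # Reads prefer a slave, falling back to the master when Sentinel
    # reports no slaves (mirroring connect_slave() in pool.py).
    if sentinel.discover_slaves("rmaster"):
        reader = sentinel.slave_for("rmaster", db=1)
    else:
        reader = master
    print(reader.get("foo"))

Because `master_for` and `slave_for` ask Sentinel for the current topology instead of caching fixed URLs, the same calls keep returning a usable client before and after a failover, which is the trade-off the README discusses: a small per-access cost in exchange for availability.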
/tests/redis_backend_testapp/tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from __future__ import absolute_import, unicode_literals, print_function 4 | 5 | import base64 6 | import datetime 7 | import sys 8 | import time 9 | import unittest 10 | from datetime import timedelta 11 | 12 | from django_redis_sentinel.client.sentinel import SentinelClient 13 | 14 | try: 15 | from unittest.mock import patch 16 | except ImportError: 17 | from mock import patch 18 | 19 | from django.conf import settings 20 | from django.core.cache import cache 21 | from django import VERSION 22 | from django.test import TestCase 23 | 24 | import django_redis_sentinel.cache 25 | from django_redis_sentinel import pool 26 | 27 | from django_redis.serializers import json as json_serializer 28 | from django_redis.serializers import msgpack as msgpack_serializer 29 | 30 | FAKE_REDIS = settings.CACHES["default"]["OPTIONS"].get("REDIS_CLIENT_CLASS") \ 31 | == "fakeredis.FakeStrictRedis" 32 | 33 | if sys.version_info[0] < 3: 34 | text_type = unicode 35 | bytes_type = str 36 | else: 37 | text_type = str 38 | bytes_type = bytes 39 | long = int 40 | 41 | 42 | def make_key(key, prefix, version): 43 | return "{}#{}#{}".format(prefix, version, key) 44 | 45 | 46 | def reverse_key(key): 47 | return key.split("#", 2)[2] 48 | 49 | 50 | class DjangoRedisConnectionStrings(TestCase): 51 | def setUp(self): 52 | # SentinelConnectionPool needs SENTINEL_SERVICE_NAME as mandatory connection parameter 53 | self.cf = pool.get_connection_factory(options={"SENTINEL_SERVICE_NAME": "rmaster"}) 54 | self.constring4 = "unix://tmp/foo.bar?db=1" 55 | self.constring5 = "redis://localhost/2" 56 | self.constring6 = "rediss://localhost:3333?db=2" 57 | 58 | def test_new_connection_strings(self): 59 | res1 = self.cf.make_connection_params(self.constring4) 60 | res2 = self.cf.make_connection_params(self.constring5) 61 | res3 = self.cf.make_connection_params(self.constring6) 62 | 63 | self.assertEqual(res1["url"], self.constring4) 64 | self.assertEqual(res2["url"], self.constring5) 65 | self.assertEqual(res3["url"], self.constring6) 66 | 67 | 68 | class DjangoRedisCacheTestCustomKeyFunction(TestCase): 69 | def setUp(self): 70 | self.old_kf = settings.CACHES['default'].get('KEY_FUNCTION') 71 | self.old_rkf = settings.CACHES['default'].get('REVERSE_KEY_FUNCTION') 72 | settings.CACHES['default']['KEY_FUNCTION'] = 'redis_backend_testapp.tests.make_key' 73 | settings.CACHES['default']['REVERSE_KEY_FUNCTION'] = 'redis_backend_testapp.tests.reverse_key' 74 | 75 | self.cache = caches['default'] 76 | try: 77 | self.cache.clear() 78 | except Exception: 79 | pass 80 | 81 | def test_custom_key_function(self): 82 | for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]: 83 | self.cache.set(key, "foo") 84 | 85 | res = self.cache.delete_pattern("*foo-a*") 86 | self.assertTrue(bool(res)) 87 | 88 | keys = self.cache.keys("foo*") 89 | self.assertEqual(set(keys), set(["foo-bb", "foo-bc"])) 90 | # ensure our custom function was actually called 91 | try: 92 | self.assertEqual(set(k.decode('utf-8') for k in self.cache.raw_client.keys('*')), 93 | set(['#1#foo-bc', '#1#foo-bb'])) 94 | except (NotImplementedError, AttributeError): 95 | # not all clients support .keys() 96 | pass 97 | 98 | def tearDown(self): 99 | settings.CACHES['default']['KEY_FUNCTION'] = self.old_kf 100 | settings.CACHES['default']['REVERSE_KEY_FUNCTION'] = self.old_rkf 101 | 102 | 103 | class DjangoRedisCacheTests(TestCase): 104 | def 
setUp(self): 105 | self.cache = cache 106 | 107 | try: 108 | self.cache.clear() 109 | except Exception: 110 | pass 111 | 112 | def test_setnx(self): 113 | # we should ensure there is no test_key_nx in redis 114 | self.cache.delete("test_key_nx") 115 | res = self.cache.get("test_key_nx", None) 116 | self.assertEqual(res, None) 117 | 118 | res = self.cache.set("test_key_nx", 1, nx=True) 119 | self.assertTrue(res) 120 | # test that second set will have 121 | res = self.cache.set("test_key_nx", 2, nx=True) 122 | self.assertFalse(res) 123 | res = self.cache.get("test_key_nx") 124 | self.assertEqual(res, 1) 125 | 126 | self.cache.delete("test_key_nx") 127 | res = self.cache.get("test_key_nx", None) 128 | self.assertEqual(res, None) 129 | 130 | def test_setnx_timeout(self): 131 | # test that timeout still works for nx=True 132 | res = self.cache.set("test_key_nx", 1, timeout=2, nx=True) 133 | self.assertTrue(res) 134 | time.sleep(3) 135 | res = self.cache.get("test_key_nx", None) 136 | self.assertEqual(res, None) 137 | 138 | # test that timeout will not affect key, if it was there 139 | self.cache.set("test_key_nx", 1) 140 | res = self.cache.set("test_key_nx", 2, timeout=2, nx=True) 141 | self.assertFalse(res) 142 | time.sleep(3) 143 | res = self.cache.get("test_key_nx", None) 144 | self.assertEqual(res, 1) 145 | 146 | self.cache.delete("test_key_nx") 147 | res = self.cache.get("test_key_nx", None) 148 | self.assertEqual(res, None) 149 | 150 | def test_save_and_integer(self): 151 | self.cache.set("test_key", 2) 152 | res = self.cache.get("test_key", "Foo") 153 | 154 | self.assertIsInstance(res, int) 155 | self.assertEqual(res, 2) 156 | 157 | def test_save_string(self): 158 | self.cache.set("test_key", "hello" * 1000) 159 | res = self.cache.get("test_key") 160 | 161 | type(res) 162 | self.assertIsInstance(res, text_type) 163 | self.assertEqual(res, "hello" * 1000) 164 | 165 | self.cache.set("test_key", "2") 166 | res = self.cache.get("test_key") 167 | 168 | self.assertIsInstance(res, text_type) 169 | self.assertEqual(res, "2") 170 | 171 | def test_save_unicode(self): 172 | self.cache.set("test_key", "heló") 173 | res = self.cache.get("test_key") 174 | 175 | self.assertIsInstance(res, text_type) 176 | self.assertEqual(res, "heló") 177 | 178 | def test_save_dict(self): 179 | if isinstance(self.cache.client._serializer, 180 | json_serializer.JSONSerializer): 181 | self.skipTest("Datetimes are not JSON serializable") 182 | 183 | if isinstance(self.cache.client._serializer, 184 | msgpack_serializer.MSGPackSerializer): 185 | # MSGPackSerializer serializers use the isoformat for datetimes 186 | # https://github.com/msgpack/msgpack-python/issues/12 187 | now_dt = datetime.datetime.now().isoformat() 188 | else: 189 | now_dt = datetime.datetime.now() 190 | 191 | test_dict = {"id": 1, "date": now_dt, "name": "Foo"} 192 | 193 | self.cache.set("test_key", test_dict) 194 | res = self.cache.get("test_key") 195 | 196 | self.assertIsInstance(res, dict) 197 | self.assertEqual(res["id"], 1) 198 | self.assertEqual(res["name"], "Foo") 199 | self.assertEqual(res["date"], now_dt) 200 | 201 | def test_save_float(self): 202 | float_val = 1.345620002 203 | 204 | self.cache.set("test_key", float_val) 205 | res = self.cache.get("test_key") 206 | 207 | self.assertIsInstance(res, float) 208 | self.assertEqual(res, float_val) 209 | 210 | def test_timeout(self): 211 | self.cache.set("test_key", 222, timeout=3) 212 | time.sleep(4) 213 | 214 | res = self.cache.get("test_key", None) 215 | self.assertEqual(res, None) 216 | 217 | def 
test_timeout_0(self): 218 | self.cache.set("test_key", 222, timeout=0) 219 | res = self.cache.get("test_key", None) 220 | self.assertEqual(res, None) 221 | 222 | def test_timeout_parameter_as_positional_argument(self): 223 | self.cache.set("test_key", 222, -1) 224 | res = self.cache.get("test_key", None) 225 | self.assertIsNone(res) 226 | 227 | self.cache.set("test_key", 222, 1) 228 | res1 = self.cache.get("test_key", None) 229 | time.sleep(2) 230 | res2 = self.cache.get("test_key", None) 231 | self.assertEqual(res1, 222) 232 | self.assertEqual(res2, None) 233 | 234 | # nx=True should not overwrite expire of key already in db 235 | self.cache.set("test_key", 222, 0) 236 | self.cache.set("test_key", 222, -1, nx=True) 237 | res = self.cache.get("test_key", None) 238 | self.assertEqual(res, 222) 239 | 240 | def test_timeout_negative(self): 241 | self.cache.set("test_key", 222, timeout=-1) 242 | res = self.cache.get("test_key", None) 243 | self.assertIsNone(res) 244 | 245 | self.cache.set("test_key", 222, timeout=0) 246 | self.cache.set("test_key", 222, timeout=-1) 247 | res = self.cache.get("test_key", None) 248 | self.assertIsNone(res) 249 | 250 | # nx=True should not overwrite expire of key already in db 251 | self.cache.set("test_key", 222, timeout=0) 252 | self.cache.set("test_key", 222, timeout=-1, nx=True) 253 | res = self.cache.get("test_key", None) 254 | self.assertEqual(res, 222) 255 | 256 | def test_set_add(self): 257 | self.cache.set("add_key", "Initial value") 258 | self.cache.add("add_key", "New value") 259 | res = cache.get("add_key") 260 | 261 | self.assertEqual(res, "Initial value") 262 | 263 | def test_get_many(self): 264 | self.cache.set("a", 1) 265 | self.cache.set("b", 2) 266 | self.cache.set("c", 3) 267 | 268 | res = self.cache.get_many(["a", "b", "c"]) 269 | self.assertEqual(res, {"a": 1, "b": 2, "c": 3}) 270 | 271 | def test_get_many_unicode(self): 272 | self.cache.set("a", "1") 273 | self.cache.set("b", "2") 274 | self.cache.set("c", "3") 275 | 276 | res = self.cache.get_many(["a", "b", "c"]) 277 | self.assertEqual(res, {"a": "1", "b": "2", "c": "3"}) 278 | 279 | def test_set_many(self): 280 | self.cache.set_many({"a": 1, "b": 2, "c": 3}) 281 | res = self.cache.get_many(["a", "b", "c"]) 282 | self.assertEqual(res, {"a": 1, "b": 2, "c": 3}) 283 | 284 | def test_delete(self): 285 | self.cache.set_many({"a": 1, "b": 2, "c": 3}) 286 | res = self.cache.delete("a") 287 | self.assertTrue(bool(res)) 288 | 289 | res = self.cache.get_many(["a", "b", "c"]) 290 | self.assertEqual(res, {"b": 2, "c": 3}) 291 | 292 | res = self.cache.delete("a") 293 | self.assertFalse(bool(res)) 294 | 295 | def test_delete_many(self): 296 | self.cache.set_many({"a": 1, "b": 2, "c": 3}) 297 | res = self.cache.delete_many(["a", "b"]) 298 | self.assertTrue(bool(res)) 299 | 300 | res = self.cache.get_many(["a", "b", "c"]) 301 | self.assertEqual(res, {"c": 3}) 302 | 303 | res = self.cache.delete_many(["a", "b"]) 304 | self.assertFalse(bool(res)) 305 | 306 | def test_delete_many_generator(self): 307 | self.cache.set_many({"a": 1, "b": 2, "c": 3}) 308 | res = self.cache.delete_many(key for key in ["a", "b"]) 309 | self.assertTrue(bool(res)) 310 | 311 | res = self.cache.get_many(["a", "b", "c"]) 312 | self.assertEqual(res, {"c": 3}) 313 | 314 | res = self.cache.delete_many(["a", "b"]) 315 | self.assertFalse(bool(res)) 316 | 317 | def test_delete_many_empty_generator(self): 318 | res = self.cache.delete_many(key for key in []) 319 | self.assertFalse(bool(res)) 320 | 321 | def test_incr(self): 322 | if 
FAKE_REDIS: 323 | raise unittest.SkipTest("FakeRedis doesn't support eval") 324 | try: 325 | self.cache.set("num", 1) 326 | 327 | self.cache.incr("num") 328 | res = self.cache.get("num") 329 | self.assertEqual(res, 2) 330 | 331 | self.cache.incr("num", 10) 332 | res = self.cache.get("num") 333 | self.assertEqual(res, 12) 334 | 335 | # max 64 bit signed int 336 | self.cache.set("num", 9223372036854775807) 337 | 338 | self.cache.incr("num") 339 | res = self.cache.get("num") 340 | self.assertEqual(res, 9223372036854775808) 341 | 342 | self.cache.incr("num", 2) 343 | res = self.cache.get("num") 344 | self.assertEqual(res, 9223372036854775810) 345 | 346 | self.cache.set("num", long(3)) 347 | 348 | self.cache.incr("num", 2) 349 | res = self.cache.get("num") 350 | self.assertEqual(res, 5) 351 | 352 | except NotImplementedError as e: 353 | print(e) 354 | 355 | def test_incr_error(self): 356 | if FAKE_REDIS: 357 | raise unittest.SkipTest("FakeRedis doesn't support eval") 358 | try: 359 | with self.assertRaises(ValueError): 360 | # key not exists 361 | self.cache.incr('numnum') 362 | except NotImplementedError: 363 | raise unittest.SkipTest("`incr` not supported in herd client") 364 | 365 | def test_get_set_bool(self): 366 | self.cache.set("bool", True) 367 | res = self.cache.get("bool") 368 | 369 | self.assertIsInstance(res, bool) 370 | self.assertEqual(res, True) 371 | 372 | self.cache.set("bool", False) 373 | res = self.cache.get("bool") 374 | 375 | self.assertIsInstance(res, bool) 376 | self.assertEqual(res, False) 377 | 378 | def test_decr(self): 379 | if FAKE_REDIS: 380 | raise unittest.SkipTest("FakeRedis doesn't support eval") 381 | try: 382 | self.cache.set("num", 20) 383 | 384 | self.cache.decr("num") 385 | res = self.cache.get("num") 386 | self.assertEqual(res, 19) 387 | 388 | self.cache.decr("num", 20) 389 | res = self.cache.get("num") 390 | self.assertEqual(res, -1) 391 | 392 | self.cache.decr("num", long(2)) 393 | res = self.cache.get("num") 394 | self.assertEqual(res, -3) 395 | 396 | self.cache.set("num", long(20)) 397 | 398 | self.cache.decr("num") 399 | res = self.cache.get("num") 400 | self.assertEqual(res, 19) 401 | 402 | # max 64 bit signed int + 1 403 | self.cache.set("num", 9223372036854775808) 404 | 405 | self.cache.decr("num") 406 | res = self.cache.get("num") 407 | self.assertEqual(res, 9223372036854775807) 408 | 409 | self.cache.decr("num", 2) 410 | res = self.cache.get("num") 411 | self.assertEqual(res, 9223372036854775805) 412 | except NotImplementedError as e: 413 | print(e) 414 | 415 | def test_version(self): 416 | self.cache.set("keytest", 2, version=2) 417 | res = self.cache.get("keytest") 418 | self.assertEqual(res, None) 419 | 420 | res = self.cache.get("keytest", version=2) 421 | self.assertEqual(res, 2) 422 | 423 | def test_incr_version(self): 424 | try: 425 | self.cache.set("keytest", 2) 426 | self.cache.incr_version("keytest") 427 | 428 | res = self.cache.get("keytest") 429 | self.assertEqual(res, None) 430 | 431 | res = self.cache.get("keytest", version=2) 432 | self.assertEqual(res, 2) 433 | except NotImplementedError as e: 434 | print(e) 435 | 436 | def test_delete_pattern(self): 437 | for key in ["foo-aa", "foo-ab", "foo-bb", "foo-bc"]: 438 | self.cache.set(key, "foo") 439 | 440 | res = self.cache.delete_pattern("*foo-a*") 441 | self.assertTrue(bool(res)) 442 | 443 | keys = self.cache.keys("foo*") 444 | self.assertEqual(set(keys), set(["foo-bb", "foo-bc"])) 445 | 446 | res = self.cache.delete_pattern("*foo-a*") 447 | self.assertFalse(bool(res)) 448 | 449 | 
def test_close(self): 450 | cache = caches["default"] 451 | cache.set("f", "1") 452 | cache.close() 453 | 454 | def test_ttl(self): 455 | if FAKE_REDIS: 456 | raise unittest.SkipTest("FakeRedis ttl is broken, see https://github.com/jamesls/fakeredis/issues/119") 457 | 458 | cache = caches["default"] 459 | _params = cache._params 460 | _is_herd = (_params["OPTIONS"]["CLIENT_CLASS"] == 461 | "django_redis.client.HerdClient") 462 | _is_shard = (_params["OPTIONS"]["CLIENT_CLASS"] == 463 | "django_redis.client.ShardClient") 464 | 465 | # Not supported for shard client. 466 | if _is_shard: 467 | return 468 | 469 | # Test ttl 470 | cache.set("foo", "bar", 10) 471 | ttl = cache.ttl("foo") 472 | 473 | if _is_herd: 474 | self.assertAlmostEqual(ttl, 12) 475 | else: 476 | self.assertAlmostEqual(ttl, 10) 477 | 478 | # Test ttl None 479 | cache.set("foo", "foo", timeout=None) 480 | ttl = cache.ttl("foo") 481 | self.assertEqual(ttl, None) 482 | 483 | # Test ttl with expired key 484 | cache.set("foo", "foo", timeout=-1) 485 | ttl = cache.ttl("foo") 486 | self.assertEqual(ttl, 0) 487 | 488 | # Test ttl with not existent key 489 | ttl = cache.ttl("not-existent-key") 490 | self.assertEqual(ttl, 0) 491 | 492 | def test_persist(self): 493 | if FAKE_REDIS: 494 | raise unittest.SkipTest("FakeRedis ttl is broken, see https://github.com/jamesls/fakeredis/issues/119") 495 | 496 | self.cache.set("foo", "bar", timeout=20) 497 | self.cache.persist("foo") 498 | 499 | ttl = self.cache.ttl("foo") 500 | self.assertIsNone(ttl) 501 | 502 | def test_expire(self): 503 | self.cache.set("foo", "bar", timeout=None) 504 | self.cache.expire("foo", 20) 505 | ttl = self.cache.ttl("foo") 506 | self.assertAlmostEqual(ttl, 20) 507 | 508 | def test_lock(self): 509 | if FAKE_REDIS: 510 | raise unittest.SkipTest("FakeRedis doesn't support locks") 511 | lock = self.cache.lock("foobar") 512 | lock.acquire(blocking=True) 513 | 514 | self.assertTrue(self.cache.has_key("foobar")) 515 | lock.release() 516 | self.assertFalse(self.cache.has_key("foobar")) 517 | 518 | def test_iter_keys(self): 519 | cache = caches["default"] 520 | _params = cache._params 521 | _is_shard = (_params["OPTIONS"]["CLIENT_CLASS"] == 522 | "django_redis.client.ShardClient") 523 | 524 | if _is_shard: 525 | return 526 | 527 | cache.set("foo1", 1) 528 | cache.set("foo2", 1) 529 | cache.set("foo3", 1) 530 | 531 | # Test simple result 532 | result = set(cache.iter_keys("foo*")) 533 | self.assertEqual(result, set(["foo1", "foo2", "foo3"])) 534 | 535 | # Test limited result 536 | result = list(cache.iter_keys("foo*", itersize=2)) 537 | self.assertEqual(len(result), 3) 538 | 539 | # Test generator object 540 | result = cache.iter_keys("foo*") 541 | self.assertNotEqual(next(result), None) 542 | 543 | def test_master_slave_switching(self): 544 | if isinstance(self.cache.client, 545 | SentinelClient): 546 | self.skipTest("SentinelClient does not use default master-slave setup") 547 | try: 548 | cache = caches["sample"] 549 | client = cache.client 550 | client._server = ["foo", "bar", ] 551 | client._clients = ["Foo", "Bar"] 552 | 553 | self.assertEqual(client.get_client(write=True), "Foo") 554 | self.assertEqual(client.get_client(write=False), "Bar") 555 | except NotImplementedError: 556 | pass 557 | 558 | def test_sentinel_switching(self): 559 | if not isinstance(self.cache.client, 560 | SentinelClient): 561 | self.skipTest("Not Sentinel clients use default master-slave setup") 562 | try: 563 | cache = caches["sample"] 564 | client = cache.client 565 | master = 
client.get_client(write=True) 566 | slave = client.get_client(write=False) 567 | 568 | master.set("Foo", "Bar") 569 | self.assertEqual(slave.get("Foo"), "Bar") 570 | self.assertEqual(master.info()['role'], "master") 571 | self.assertEqual(slave.info()['role'], "slave") 572 | except NotImplementedError: 573 | pass 574 | 575 | def test_clear_with_cache_prefix(self): 576 | """ 577 | Tests that cache.clear() does only delete keys which starts with the 578 | correct prefix configured with KEY_PREFIX. 579 | """ 580 | cache_normal = caches['sample'] 581 | cache_with_prefix = caches['with_prefix'] 582 | cache_normal.set('some_key', 'some_value') 583 | cache_with_prefix.set('other_key', 'other_value') 584 | 585 | cache_with_prefix.clear() 586 | 587 | self.assertIsNone(cache_with_prefix.get('other_key')) 588 | self.assertEqual(cache_normal.get('some_key'), 'some_value') 589 | 590 | def test_zlib_compressor(self): 591 | pass 592 | 593 | 594 | import django_redis_sentinel.cache 595 | 596 | 597 | class DjangoOmitExceptionsTests(TestCase): 598 | def setUp(self): 599 | self._orig_setting = django_redis_sentinel.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS 600 | django_redis_sentinel.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS = True 601 | self.cache = caches["doesnotexist"] 602 | self.cache._orig_ignore_exceptions = self.cache._ignore_exceptions 603 | self.cache._ignore_exceptions = True 604 | 605 | def tearDown(self): 606 | django_redis_sentinel.cache.DJANGO_REDIS_IGNORE_EXCEPTIONS = self._orig_setting 607 | self.cache._ignore_exceptions = self.cache._orig_ignore_exceptions 608 | 609 | def test_get_many_returns_default_arg(self): 610 | self.assertEqual(self.cache.get_many(["key1", "key2", "key3"]), {}) 611 | 612 | def test_get(self): 613 | self.assertIsNone(self.cache.get("key")) 614 | self.assertEqual(self.cache.get("key", "default"), "default") 615 | self.assertEqual(self.cache.get("key", default="default"), "default") 616 | 617 | 618 | from django.contrib.sessions.backends.cache import SessionStore as CacheSession 619 | 620 | from django.core.cache import caches 621 | from django.test import override_settings 622 | from django.test.utils import patch_logger 623 | from django.utils import six, timezone 624 | 625 | 626 | class SessionTestsMixin(object): 627 | # This does not inherit from TestCase to avoid any tests being run with this 628 | # class, which wouldn't work, and to allow different TestCase subclasses to 629 | # be used. 

    backend = None  # subclasses must specify

    def setUp(self):
        self.session = self.backend()

    def tearDown(self):
        # NB: be careful to delete any sessions created; stale sessions fill up
        # the /tmp (with some backends) and eventually overwhelm it after lots
        # of runs (think buildbots)
        self.session.delete()

    def test_new_session(self):
        self.assertFalse(self.session.modified)
        self.assertFalse(self.session.accessed)

    def test_get_empty(self):
        self.assertEqual(self.session.get('cat'), None)

    def test_store(self):
        self.session['cat'] = "dog"
        self.assertTrue(self.session.modified)
        self.assertEqual(self.session.pop('cat'), 'dog')

    def test_pop(self):
        self.session['some key'] = 'exists'
        # Need to reset these to pretend we haven't accessed the session yet:
        self.session.accessed = False
        self.session.modified = False

        self.assertEqual(self.session.pop('some key'), 'exists')
        self.assertTrue(self.session.accessed)
        self.assertTrue(self.session.modified)
        self.assertEqual(self.session.get('some key'), None)

    def test_pop_default(self):
        self.assertEqual(self.session.pop('some key', 'does not exist'),
                         'does not exist')
        self.assertTrue(self.session.accessed)
        self.assertFalse(self.session.modified)

    def test_setdefault(self):
        self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
        self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
        self.assertTrue(self.session.accessed)
        self.assertTrue(self.session.modified)

    def test_update(self):
        self.session.update({'update key': 1})
        self.assertTrue(self.session.accessed)
        self.assertTrue(self.session.modified)
        self.assertEqual(self.session.get('update key', None), 1)

    def test_has_key(self):
        self.session['some key'] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertIn('some key', self.session)
        self.assertTrue(self.session.accessed)
        self.assertFalse(self.session.modified)

    def test_values(self):
        self.assertEqual(list(self.session.values()), [])
        self.assertTrue(self.session.accessed)
        self.session['some key'] = 1
        self.assertEqual(list(self.session.values()), [1])

    def test_iterkeys(self):
        self.session['x'] = 1
        self.session.modified = False
        self.session.accessed = False
        i = six.iterkeys(self.session)
        self.assertTrue(hasattr(i, '__iter__'))
        self.assertTrue(self.session.accessed)
        self.assertFalse(self.session.modified)
        self.assertEqual(list(i), ['x'])

    def test_itervalues(self):
        self.session['x'] = 1
        self.session.modified = False
        self.session.accessed = False
        i = six.itervalues(self.session)
        self.assertTrue(hasattr(i, '__iter__'))
        self.assertTrue(self.session.accessed)
        self.assertFalse(self.session.modified)
        self.assertEqual(list(i), [1])

    def test_iteritems(self):
        self.session['x'] = 1
        self.session.modified = False
        self.session.accessed = False
        i = six.iteritems(self.session)
        self.assertTrue(hasattr(i, '__iter__'))
        self.assertTrue(self.session.accessed)
        self.assertFalse(self.session.modified)
        self.assertEqual(list(i), [('x', 1)])

    def test_clear(self):
        self.session['x'] = 1
        self.session.modified = False
        self.session.accessed = False
        self.assertEqual(list(self.session.items()), [('x', 1)])
        self.session.clear()
        self.assertEqual(list(self.session.items()), [])
        self.assertTrue(self.session.accessed)
        self.assertTrue(self.session.modified)

    def test_save(self):
        if (hasattr(self.session, '_cache') and 'DummyCache' in
                settings.CACHES[settings.SESSION_CACHE_ALIAS]['BACKEND']):
            raise unittest.SkipTest("Session saving tests require a real cache backend")
        self.session.save()
        self.assertTrue(self.session.exists(self.session.session_key))

    def test_delete(self):
        self.session.save()
        self.session.delete(self.session.session_key)
        self.assertFalse(self.session.exists(self.session.session_key))

    def test_flush(self):
        self.session['foo'] = 'bar'
        self.session.save()
        prev_key = self.session.session_key
        self.session.flush()
        self.assertFalse(self.session.exists(prev_key))
        self.assertNotEqual(self.session.session_key, prev_key)
        self.assertIsNone(self.session.session_key)
        self.assertTrue(self.session.modified)
        self.assertTrue(self.session.accessed)

    def test_cycle(self):
        self.session['a'], self.session['b'] = 'c', 'd'
        self.session.save()
        prev_key = self.session.session_key
        prev_data = list(self.session.items())
        self.session.cycle_key()
        self.assertNotEqual(self.session.session_key, prev_key)
        self.assertEqual(list(self.session.items()), prev_data)

    def test_save_doesnt_clear_data(self):
        self.session['a'] = 'b'
        self.session.save()
        self.assertEqual(self.session['a'], 'b')

    def test_invalid_key(self):
        # Submitting an invalid session key (either by guessing, or if the db has
        # removed the key) results in a new key being generated.
        try:
            session = self.backend('1')
            try:
                session.save()
            except AttributeError:
                self.fail(
                    "The session object did not save properly. "
                    "Middleware may be saving cache items without namespaces."
                )
            self.assertNotEqual(session.session_key, '1')
            self.assertEqual(session.get('cat'), None)
            session.delete()
        finally:
            # Some backends leave a stale cache entry for the invalid
            # session key; make sure that entry is manually deleted
            session.delete('1')

    if VERSION[:2] != (1, 8):
        def test_session_key_empty_string_invalid(self):
            """Falsy values (such as an empty string) are rejected."""
            self.session._session_key = ''
            self.assertIsNone(self.session.session_key)

        def test_session_key_too_short_invalid(self):
            """Strings shorter than 8 characters are rejected."""
            self.session._session_key = '1234567'
            self.assertIsNone(self.session.session_key)

        def test_session_key_valid_string_saved(self):
            """Strings of length 8 and up are accepted and stored."""
            self.session._session_key = '12345678'
            self.assertEqual(self.session.session_key, '12345678')

    def test_session_key_is_read_only(self):
        def set_session_key(session):
            session.session_key = session._get_new_session_key()

        self.assertRaises(AttributeError, set_session_key, self.session)

    # Custom session expiry
    def test_default_expiry(self):
        # A normal session has a max age equal to settings.SESSION_COOKIE_AGE.
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)

        # So does a custom session with an idle expiration time of 0 (but it'll
        # expire at browser close).
        self.session.set_expiry(0)
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)

    def test_custom_expiry_seconds(self):
        modification = timezone.now()

        self.session.set_expiry(10)

        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))

        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)

    def test_custom_expiry_timedelta(self):
        modification = timezone.now()

        # Mock timezone.now, because set_expiry calls it on this code path.
        original_now = timezone.now
        try:
            timezone.now = lambda: modification
            self.session.set_expiry(timedelta(seconds=10))
        finally:
            timezone.now = original_now

        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))

        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)

    def test_custom_expiry_datetime(self):
        modification = timezone.now()

        self.session.set_expiry(modification + timedelta(seconds=10))

        date = self.session.get_expiry_date(modification=modification)
        self.assertEqual(date, modification + timedelta(seconds=10))

        age = self.session.get_expiry_age(modification=modification)
        self.assertEqual(age, 10)

    def test_custom_expiry_reset(self):
        self.session.set_expiry(None)
        self.session.set_expiry(10)
        self.session.set_expiry(None)
        self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)

    def test_get_expire_at_browser_close(self):
        # Tests get_expire_at_browser_close with different settings and different
        # set_expiry calls
        with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
            self.session.set_expiry(10)
            self.assertFalse(self.session.get_expire_at_browser_close())

            self.session.set_expiry(0)
            self.assertTrue(self.session.get_expire_at_browser_close())

            self.session.set_expiry(None)
            self.assertFalse(self.session.get_expire_at_browser_close())

        with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
            self.session.set_expiry(10)
            self.assertFalse(self.session.get_expire_at_browser_close())

            self.session.set_expiry(0)
            self.assertTrue(self.session.get_expire_at_browser_close())

            self.session.set_expiry(None)
            self.assertTrue(self.session.get_expire_at_browser_close())

    def test_decode(self):
        # Ensure we can decode what we encode
        data = {'a test key': 'a test value'}
        encoded = self.session.encode(data)
        self.assertEqual(self.session.decode(encoded), data)

    def test_decode_failure_logged_to_security(self):
        bad_encode = base64.b64encode(b'flaskdj:alkdjf')
        with patch_logger('django.security.SuspiciousSession', 'warning') as calls:
            self.assertEqual({}, self.session.decode(bad_encode))
            # check that the failed decode is logged
            self.assertEqual(len(calls), 1)
            self.assertIn('corrupted', calls[0])

    def test_actual_expiry(self):
        # this doesn't work with JSONSerializer (serializing timedelta)
        with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'):
            self.session = self.backend()  # reinitialize after overriding settings

            # Regression test for #19200
            old_session_key = None
            new_session_key = None
            try:
                self.session['foo'] = 'bar'
                self.session.set_expiry(-timedelta(seconds=10))
                self.session.save()
                old_session_key = self.session.session_key
                # With an expiry date in the past, the session expires instantly.
                new_session = self.backend(self.session.session_key)
                new_session_key = new_session.session_key
                self.assertNotIn('foo', new_session)
            finally:
                self.session.delete(old_session_key)
                self.session.delete(new_session_key)


class SessionTests(SessionTestsMixin, TestCase):
    backend = CacheSession

    def test_actual_expiry(self):
        pass
--------------------------------------------------------------------------------
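Note: the tests above assume cache aliases such as "default", "sample", "with_prefix" and "doesnotexist" wired to the Sentinel-aware client, plus the cache-based session engine. The sketch below is for orientation only: the backend class name, LOCATION format, Sentinel hosts/ports and prefix value are illustrative assumptions, not taken from this repository; the configuration actually used lives in tests/test_sqlite_sentinel.py.

# Hypothetical settings sketch (assumptions marked inline), not the project's
# actual test settings. "rmaster" matches the service name in sentinel/sentinel.conf.
CACHES = {
    "default": {
        "BACKEND": "django_redis_sentinel.cache.RedisCache",              # assumed class name
        "LOCATION": "rmaster/sentinel-1:26379,sentinel-2:26379/0",        # assumed format
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient",
        },
    },
    "with_prefix": {
        "BACKEND": "django_redis_sentinel.cache.RedisCache",              # assumed class name
        "LOCATION": "rmaster/sentinel-1:26379,sentinel-2:26379/1",        # assumed format
        "KEY_PREFIX": "test-prefix",                                      # illustrative value
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis_sentinel.client.SentinelClient",
        },
    },
}

# SessionTests drives django.contrib.sessions through the cache backend above.
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_CACHE_ALIAS = "default"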