├── tests ├── __init__.py ├── load │ ├── __init__.py │ ├── tests_load.py │ └── load_test_compression.py ├── misc │ ├── __init__.py │ ├── tests_simple.py │ ├── tests_readme.py │ └── tests_asserts.py ├── unit │ ├── __init__.py │ ├── tests_insertion_order_methods.py │ ├── tests_standard_types.py │ ├── tests_extend_json_types.py │ ├── tests_encrypt.py │ └── tests_extend_types.py └── fuzzing │ └── tests_hypothesis_basic_types.py ├── src └── redis_dict │ ├── py.typed │ ├── __init__.py │ ├── type_management.py │ ├── python_dict.py │ └── core.py ├── requirements.txt ├── scripts ├── start_redis_docker.sh ├── start_valkey_docker.sh ├── serve_docs.sh ├── view_docs.sh ├── formatter.sh ├── build_local_pkg.sh ├── build_dev.sh ├── tests.sh ├── build_docs.sh ├── build_dev_checks.sh ├── verify.sh ├── lint.sh └── generate_sphinx_config.py ├── .coveragerc ├── .gitignore ├── dev-requirements.txt ├── LICENSE ├── .github └── workflows │ ├── valkey_support.yml │ ├── deploy_docs.yml │ ├── ci.yml │ └── build_package_pypi.yml ├── docs └── tutorials │ └── encrypted_redis.MD ├── pyproject.toml └── README.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/redis_dict/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/load/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/misc/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | redis==4.5.4 2 | -------------------------------------------------------------------------------- /scripts/start_redis_docker.sh: -------------------------------------------------------------------------------- 1 | docker run --name my-redis -p 6379:6379 -d redis -------------------------------------------------------------------------------- /scripts/start_valkey_docker.sh: -------------------------------------------------------------------------------- 1 | docker run --name my-valkey -p 6379:6379 -d valkey/valkey 2 | -------------------------------------------------------------------------------- /scripts/serve_docs.sh: -------------------------------------------------------------------------------- 1 | open docs/build/html/index.html # On macOS 2 | #xdg-open docs/build/html/index.html # On Linux 3 | -------------------------------------------------------------------------------- /scripts/view_docs.sh: -------------------------------------------------------------------------------- 1 | open docs/build/html/index.html # On macOS 2 | #xdg-open docs/build/html/index.html # On Linux 3 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | */site-packages/* 4 | *test*.py 5 | tests/* 6 | *tests.py 7 | */tests/* 8 | 
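The scripts listed above include start_redis_docker.sh and start_valkey_docker.sh, which start a plain Redis or Valkey container on port 6379. A minimal connectivity check against such a container might look like the sketch below; it is illustrative rather than a repository file, it assumes the container is reachable on localhost:6379, and it passes host/port keyword arguments the same way the TLS tutorial in docs/tutorials/encrypted_redis.MD does.

```python
# Illustrative smoke test (not a repository file): assumes a Redis or Valkey
# container started via scripts/start_redis_docker.sh is listening on localhost:6379.
from redis_dict import RedisDict

dic = RedisDict(namespace="smoke_test", host="127.0.0.1", port=6379)
dic["hello"] = "world"
assert dic["hello"] == "world"

dic.clear()  # clean up keys created under the "smoke_test" namespace
print("Redis connection OK")
```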
-------------------------------------------------------------------------------- /scripts/formatter.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # Adds whitespace to ":" and "," within f-strings for some reason 5 | .venv_dev/bin/autopep8 --ignore E203,E225,E231 src/ 6 | -------------------------------------------------------------------------------- /scripts/build_local_pkg.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir test_install_0.1.0 3 | cd test_install_0.1.0 4 | python3 -m venv venv 5 | ./venv/bin/pip install -e .. 6 | ./venv/bin/python ../tests/misc/tests_simple.py 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .tox 2 | __pycache__ 3 | *.pyc 4 | *.egg-info 5 | 6 | build 7 | dist 8 | venv 9 | .venv 10 | .venv_* 11 | dev_venv 12 | 13 | .hypothesis/ 14 | 15 | .coverage* 16 | htmlcov 17 | 18 | 19 | .idea/ 20 | -------------------------------------------------------------------------------- /scripts/build_dev.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | rm -rf .venv_dev 5 | python3 -m venv .venv_dev 6 | source .venv_dev/bin/activate 7 | 8 | pip install --upgrade pip 9 | pip install -e ".[dev]" 10 | 11 | deactivate 12 | -------------------------------------------------------------------------------- /scripts/tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [ ! -d ".venv_dev" ]; then 5 | echo "Virtual environment not found. Running build script..." 
6 | ./scripts/build_dev.sh 7 | fi 8 | 9 | .venv_dev/bin/python -m unittest discover -s tests --failfast -v 10 | -------------------------------------------------------------------------------- /scripts/build_docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | rm -rf docs/Makefile docs/build/* docs/source/* 5 | 6 | python3 -m venv .venv_docs 7 | 8 | source .venv_docs/bin/activate 9 | pip install --upgrade pip 10 | pip install -e ".[docs]" 11 | 12 | pip freeze 13 | 14 | python3 scripts/generate_sphinx_config.py 15 | 16 | sphinx-apidoc -o docs/source src/redis_dict 17 | 18 | cd docs 19 | make html 20 | -------------------------------------------------------------------------------- /tests/misc/tests_simple.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | from redis_dict import RedisDict, PythonRedisDict 4 | 5 | for obj in [RedisDict, PythonRedisDict]: 6 | dic = obj(namespace='assert_test') 7 | assert 'random' not in dic 8 | dic['random'] = 4 9 | assert dic['random'] == 4 10 | assert 'random' in dic 11 | del dic['random'] 12 | assert 'random' not in dic 13 | 14 | now = datetime.now() 15 | dic['datetime'] = now 16 | assert dic['datetime'] == now 17 | dic.clear() 18 | 19 | print("passed assert test") 20 | -------------------------------------------------------------------------------- /scripts/build_dev_checks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | rm -rf .venv_dev 5 | python3 -m venv .venv_dev 6 | source .venv_dev/bin/activate 7 | 8 | pip install --upgrade pip 9 | pip install -e ".[dev]" 10 | 11 | # Type Check 12 | python -m mypy 13 | 14 | # Doctype Check 15 | darglint src/redis_dict/ 16 | 17 | # Security Check 18 | bandit -r src/redis_dict 19 | 20 | # Multiple linters 21 | python -m pylama -i E501,E231 src 22 | 23 | # Unit tests 24 | python -m unittest discover -s tests 25 | 26 | # Docstring Check 27 | # pydocstyle src/redis_dict/ 28 | 29 | deactivate 30 | -------------------------------------------------------------------------------- /scripts/verify.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [ ! -d ".venv_dev" ]; then 5 | echo "Virtual environment not found. Running build script..." 6 | ./scripts/build_dev.sh 7 | fi 8 | 9 | source .venv_dev/bin/activate 10 | 11 | # Type Check 12 | python -m mypy 13 | 14 | # Doctype Check 15 | darglint src/redis_dict/ 16 | 17 | # Multiple linters 18 | python -m pylama -i E501,E231 src 19 | 20 | # Unit tests 21 | python -m unittest discover -s tests 22 | 23 | # Security Check 24 | bandit -r src/redis_dict 25 | 26 | # Docstring Check 27 | pydocstyle src/redis_dict/ 28 | 29 | deactivate 30 | -------------------------------------------------------------------------------- /scripts/lint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | #!/bin/bash 5 | set -e 6 | 7 | if [ ! -d ".venv_dev" ]; then 8 | echo "Virtual environment not found. Running build script..." 
9 | ./scripts/build_dev.sh 10 | fi 11 | 12 | source .venv_dev/bin/activate 13 | 14 | # Type Check 15 | python -m mypy 16 | 17 | # Doctype Check 18 | darglint src/redis_dict/ 19 | 20 | # Multiple linters 21 | python -m pylama -i E501,E231 src 22 | 23 | # Security Check 24 | bandit -r src/redis_dict 25 | 26 | # Docstring Check 27 | pydocstyle src/redis_dict/ 28 | 29 | # Pylint 30 | pylint src/ 31 | 32 | deactivate 33 | -------------------------------------------------------------------------------- /src/redis_dict/__init__.py: -------------------------------------------------------------------------------- 1 | """__init__ module for redis dict.""" 2 | from importlib.metadata import version, PackageNotFoundError 3 | 4 | from .core import RedisDict 5 | from .python_dict import PythonRedisDict 6 | from .type_management import decoding_registry, encoding_registry, RedisDictJSONEncoder, RedisDictJSONDecoder 7 | 8 | __all__ = [ 9 | 'RedisDict', 10 | 'PythonRedisDict', 11 | 'decoding_registry', 12 | 'encoding_registry', 13 | 'RedisDictJSONEncoder', 14 | 'RedisDictJSONDecoder', 15 | ] 16 | try: 17 | __version__ = version("redis-dict") 18 | except PackageNotFoundError: 19 | __version__ = "0.0.0" 20 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | astroid==3.2.4 2 | attrs==22.2.0 3 | cffi==1.15.1 4 | coverage==5.5 5 | cryptography==44.0.1 6 | darglint==1.8.1 7 | dill==0.3.9 8 | exceptiongroup==1.1.1 9 | future==0.18.3 10 | hypothesis==6.70.1 11 | isort==5.13.2 12 | mccabe==0.7.0 13 | mypy==1.13.0 14 | mypy-extensions==1.0.0 15 | platformdirs==4.3.6 16 | pycodestyle==2.10.0 17 | pycparser==2.21 18 | pydocstyle==6.3.0 19 | pyflakes==3.0.1 20 | pylama==8.4.1 21 | pylint==3.2.7 22 | redis==5.2.0 23 | setuptools==78.1.1 24 | snowballstemmer==2.2.0 25 | sortedcontainers==2.4.0 26 | tomli==2.0.1 27 | tomlkit==0.13.2 28 | types-cffi==1.16.0.20240331 29 | types-pyOpenSSL==24.1.0.20240722 30 | types-redis==4.6.0.20241004 31 | types-setuptools==75.2.0.20241025 32 | typing_extensions==4.12.2 33 | -------------------------------------------------------------------------------- /tests/misc/tests_readme.py: -------------------------------------------------------------------------------- 1 | from redis_dict import RedisDict 2 | ### Insertion Order 3 | from redis_dict import PythonRedisDict 4 | 5 | dic = PythonRedisDict() 6 | dic["1"] = "one" 7 | dic["2"] = "two" 8 | dic["3"] = "three" 9 | 10 | assert list(dic.keys()) == ["1", "2", "3"] 11 | 12 | ### Extending RedisDict with Custom Types 13 | import json 14 | 15 | class Person: 16 | def __init__(self, name, age): 17 | self.name = name 18 | self.age = age 19 | 20 | def encode(self) -> str: 21 | return json.dumps(self.__dict__) 22 | 23 | @classmethod 24 | def decode(cls, encoded_str: str) -> 'Person': 25 | return cls(**json.loads(encoded_str)) 26 | 27 | redis_dict = RedisDict() 28 | 29 | # Extend redis dict with the new type 30 | redis_dict.extends_type(Person) 31 | 32 | # RedisDict can now seamlessly handle Person instances. 
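# Note: extends_type(Person) is called here without an explicit encoder/decoder; judging by
# _create_default_encode/_create_default_decode in redis_dict.type_management, it appears to
# fall back to the class's own encode()/decode() methods, which is why Person defines both.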
33 | person = Person(name="John", age=32) 34 | redis_dict["person1"] = person 35 | 36 | result = redis_dict["person1"] 37 | 38 | assert result.name == person.name 39 | assert result.age == person.age -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Melvin Bijman 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /.github/workflows/valkey_support.yml: -------------------------------------------------------------------------------- 1 | name: Supports Valkey 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - "**" 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | build: 13 | runs-on: ${{ matrix.os }} 14 | 15 | services: 16 | redis: 17 | image: valkey/valkey 18 | ports: 19 | - 6379:6379 20 | 21 | strategy: 22 | matrix: 23 | os: [ubuntu-latest] 24 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 25 | 26 | steps: 27 | - uses: actions/checkout@v4 28 | - name: Set up Python ${{ matrix.python-version }} 29 | uses: actions/setup-python@v5 30 | with: 31 | python-version: ${{ matrix.python-version }} 32 | 33 | - name: Install dependencies 34 | run: | 35 | pip install --upgrade pip setuptools 36 | pip install -r requirements.txt 37 | 38 | - name: Install test dependencies 39 | run: | 40 | pip install ".[dev]" 41 | 42 | - name: Run Unit Tests 43 | env: 44 | PYTHONPATH: src 45 | run: | 46 | coverage run -m unittest discover -s tests 47 | -------------------------------------------------------------------------------- /.github/workflows/deploy_docs.yml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy Documentation 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | workflow_dispatch: 7 | 8 | permissions: 9 | id-token: write 10 | pages: write 11 | contents: read 12 | 13 | concurrency: 14 | group: "pages" 15 | cancel-in-progress: false 16 | 17 | jobs: 18 | deploy: 19 | environment: 20 | name: github-pages 21 | url: ${{ steps.deployment.outputs.page_url }} 22 | runs-on: ubuntu-latest 23 | steps: 24 | - name: Checkout 25 | uses: actions/checkout@v4 26 | 27 | - name: Setup Python 28 | uses: actions/setup-python@v5 29 | with: 30 | python-version: '3.x' 31 | 32 | - name: Build documentation 33 | run: bash scripts/build_docs.sh 34 | 35 | - name: Setup Pages 36 
| uses: actions/configure-pages@v5 37 | 38 | - name: Upload artifact 39 | uses: actions/upload-pages-artifact@v3 40 | with: 41 | path: 'docs/build/html' 42 | 43 | - name: Deploy to GitHub Pages 44 | id: deployment 45 | uses: actions/deploy-pages@v4 46 | 47 | - name: Output documentation URL 48 | run: | 49 | echo "📚 Documentation URL: ${{ steps.deployment.outputs.page_url }}" 50 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - "**" 7 | pull_request: 8 | branches: 9 | - "**" 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | build: 16 | runs-on: ${{ matrix.os }} 17 | 18 | services: 19 | redis: 20 | image: redis 21 | ports: 22 | - 6379:6379 23 | 24 | strategy: 25 | matrix: 26 | os: [ubuntu-latest] 27 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] 28 | 29 | steps: 30 | - uses: actions/checkout@v4 31 | - name: Set up Python ${{ matrix.python-version }} 32 | uses: actions/setup-python@v5 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | 36 | - name: Install dependencies 37 | run: | 38 | pip install --upgrade pip setuptools 39 | pip install -r requirements.txt 40 | 41 | - name: Install test dependencies 42 | run: | 43 | pip install ".[dev]" 44 | 45 | - name: Run Pylama 46 | run: | 47 | python -m pylama -i E501,E231 src 48 | 49 | - name: Run type check with mypy strict 50 | run: | 51 | mypy 52 | 53 | - name: Run Doctype Check 54 | run: | 55 | darglint src/redis_dict/ 56 | pydocstyle src/redis_dict/ 57 | 58 | - name: Run Security check 59 | run: | 60 | bandit -r src/redis_dict 61 | 62 | - name: Run Unit Tests 63 | env: 64 | PYTHONPATH: src 65 | run: | 66 | coverage run -m unittest discover -s tests 67 | 68 | - name: Upload coverage reports to Codecov, send only once 69 | if: matrix.python-version == '3.12' 70 | uses: codecov/codecov-action@v4.0.1 71 | with: 72 | token: ${{ secrets.CODECOV_TOKEN }} 73 | -------------------------------------------------------------------------------- /.github/workflows/build_package_pypi.yml: -------------------------------------------------------------------------------- 1 | name: Build and Upload to PyPI 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | pull_request: 8 | branches: [ main ] 9 | 10 | jobs: 11 | 12 | build-and-test: 13 | services: 14 | redis: 15 | image: redis 16 | ports: 17 | - 6379:6379 18 | 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | 23 | - name: Set up Python 24 | uses: actions/setup-python@v4 25 | with: 26 | python-version: '3.11' 27 | 28 | - name: Setup virtual environment 29 | run: | 30 | python3 -m venv .venv_build 31 | source .venv_build/bin/activate 32 | pip install --upgrade pip 33 | 34 | - name: Install dependencies 35 | run: | 36 | python -m pip install -e ".[dev,build]" 37 | 38 | - name: Run quality checks 39 | run: | 40 | python -m mypy 41 | darglint src/redis_dict/ 42 | python -m pylama -i E501,E231 src 43 | python -m unittest discover -s tests 44 | 45 | - name: Run Security check 46 | run: | 47 | bandit -r src/redis_dict 48 | 49 | - name: Build package 50 | run: python -m build 51 | 52 | - name: Store distribution packages 53 | uses: actions/upload-artifact@v4 54 | with: 55 | name: python-package-distributions 56 | path: dist/ 57 | retention-days: 1 58 | 59 | publish-to-pypi: 60 | needs: [build-and-test] 61 | if: github.event_name == 'push' && startsWith(github.ref, 
'refs/tags/v') 62 | runs-on: ubuntu-latest 63 | permissions: 64 | id-token: write 65 | 66 | steps: 67 | - name: Download all the dists 68 | uses: actions/download-artifact@v4 69 | with: 70 | name: python-package-distributions 71 | path: dist/ 72 | - name: Publish to PyPI 73 | uses: pypa/gh-action-pypi-publish@release/v1 74 | with: 75 | password: ${{ secrets.PYPI_API_TOKEN }} 76 | -------------------------------------------------------------------------------- /tests/misc/tests_asserts.py: -------------------------------------------------------------------------------- 1 | import time 2 | from datetime import datetime 3 | 4 | from redis_dict import RedisDict 5 | 6 | dic = RedisDict(namespace='assert_test') 7 | assert 'random' not in dic 8 | dic['random'] = 4 9 | assert dic['random'] == 4 10 | assert 'random' in dic 11 | del dic['random'] 12 | assert 'random' not in dic 13 | 14 | now = datetime.now() 15 | dic['datetime'] = now 16 | assert dic['datetime'] == now 17 | dic.clear() 18 | 19 | deep = ['key', 'key1', 'key2'] 20 | deep_val = 'mister' 21 | dic.chain_set(deep, deep_val) 22 | 23 | assert deep_val == dic.chain_get(deep) 24 | dic.chain_del(deep) 25 | 26 | try: 27 | dic.chain_get(deep) 28 | except KeyError: 29 | pass 30 | except Exception: 31 | print('failed to throw KeyError') 32 | else: 33 | print('failed to throw KeyError') 34 | 35 | assert 'random' not in dic 36 | dic['random'] = 4 37 | dd = RedisDict(namespace='app_name_too') 38 | assert len(dd) == 0 39 | 40 | dd['random'] = 5 41 | 42 | assert dic['random'] == 4 43 | assert 'random' in dic 44 | 45 | assert dd['random'] == 5 46 | assert 'random' in dd 47 | 48 | del dic['random'] 49 | assert 'random' not in dic 50 | 51 | assert dd['random'] == 5 52 | assert 'random' in dd 53 | 54 | del dd['random'] 55 | assert 'random' not in dd 56 | 57 | with dd.expire_at(1): 58 | dd['gone_in_one_sec'] = 'gone' 59 | 60 | assert dd['gone_in_one_sec'] == 'gone' 61 | 62 | time.sleep(1.1) 63 | 64 | try: 65 | dd['gone_in_one_sec'] 66 | except KeyError: 67 | pass 68 | except Exception: 69 | print('failed to throw KeyError') 70 | else: 71 | print('failed to throw KeyError') 72 | 73 | assert len(dd) == 0 74 | 75 | items = {'k1': 'v1', 'k2': 'v2', 'k3': 'v3'} 76 | for key, val in items.items(): 77 | dd.chain_set(['keys', key], val) 78 | 79 | assert len(dd) == len(items) 80 | assert sorted(dd.multi_get('keys')) == sorted(list(items.values())) 81 | assert dd.multi_dict('keys') == items 82 | 83 | long_key = 'thekeyislongbutstill' 84 | items = {'K1': 'V1', 'KK22': 'VV22', 'KKK333': 'VVV333'} 85 | for key, val in items.items(): 86 | dd.chain_set([long_key, key], val) 87 | 88 | assert sorted(dd.multi_get(long_key)) == sorted(list(items.values())) 89 | assert dd.multi_dict(long_key) == items 90 | dd.multi_del(long_key) 91 | 92 | dd['one_item'] = 'im here' 93 | dd.multi_del('keys') 94 | 95 | assert len(dd) == 1 96 | 97 | del dd['one_item'] 98 | assert len(dd) == 0 99 | 100 | print('all is well') 101 | -------------------------------------------------------------------------------- /docs/tutorials/encrypted_redis.MD: -------------------------------------------------------------------------------- 1 | # Setup Guide encrypted Redis for Redis-dict 2 | 3 | ### Introduction 4 | 5 | In this document, we will do the following: 6 | 1. Creating a Self-Signed Certificate: This is the initial step of our guide. 7 | 2. Setting Up Redis with Docker and TLS: Next, we will set up an encrypted Docker Redis instance that will use the created certificate. 8 | 3. 
Python Code: After setting up, we will install redis-dict and write a small test script. 9 | 4. Running the Test: Finally, we will run the script against the encrypted Redis instance. 10 | For production use, one might already have a Redis instance and/or certificates. 11 | 12 | ### Steps 13 | 14 | 1. **Creating a Self-Signed Certificate**: 15 | 16 | Use OpenSSL to generate a self-signed certificate: 17 | 18 | ```bash 19 | openssl req -x509 -newkey rsa:4096 -keyout test_redis_key.pem -out test_redis_cert.pem -days 365 -nodes 20 | ``` 21 | 22 | This command will create a private key `test_redis_key.pem` and a self-signed certificate `test_redis_cert.pem`. 23 | 24 | 2. **Setting Up Redis with Docker and TLS**: 25 | 26 | Create a `redis.conf` file: 27 | 28 | ```bash 29 | echo "port 6379 30 | tls-port 6380 31 | tls-cert-file /tls/test_redis_cert.pem 32 | tls-key-file /tls/test_redis_key.pem 33 | tls-ca-cert-file /tls/test_redis_cert.pem 34 | " > redis.conf 35 | ``` 36 | 37 | Run a Redis Docker container with the self-signed certificate and key: 38 | 39 | ```bash 40 | docker run -v `pwd`:/tls -v `pwd`/redis.conf:/usr/local/etc/redis/redis.conf -p 6379:6379 -p 6380:6380 redis redis-server /usr/local/etc/redis/redis.conf 41 | ``` 42 | 43 | 3. **Python Code**: 44 | 45 | Install the redis-dict package, if not already installed: 46 | 47 | ```bash 48 | pip install redis-dict 49 | ``` 50 | 51 | Create a `main.py` Python file with the following code: 52 | 53 | ```python 54 | from redis_dict import RedisDict 55 | 56 | redis_config = { 57 | 'host': '127.0.0.1', 58 | 'port': 6380, 59 | 'ssl': True, 60 | 'ssl_keyfile': 'test_redis_key.pem', 61 | 'ssl_certfile': 'test_redis_cert.pem', 62 | 'ssl_cert_reqs': 'required', 63 | 'ssl_ca_certs': 'test_redis_cert.pem' 64 | } 65 | 66 | dic = RedisDict(**redis_config) 67 | 68 | dic['foo'] = 'bar' 69 | print(dic['foo']) # prints 'bar' 70 | ``` 71 | 72 | This will connect to the Redis instance via TLS, set a key-value pair, and then retrieve and print the value. 73 | 74 | 4.
**Running the Test**: 75 | 76 | Run the Python script: 77 | 78 | ```bash 79 | python main.py 80 | ``` 81 | -------------------------------------------------------------------------------- /tests/load/tests_load.py: -------------------------------------------------------------------------------- 1 | import random 2 | import time 3 | import string 4 | import statistics 5 | from redis_dict import RedisDict 6 | 7 | BATCH_SIZE = 1000 8 | OPERATIONS = 100000 9 | SEED = 42 10 | BATCHING = False 11 | 12 | random.seed(SEED) 13 | 14 | data_types = ["str", "int", "float", "bool", "list", "dict"] 15 | 16 | 17 | def generate_random_data(data_type): 18 | if data_type == "str": 19 | return ''.join(random.choice(string.ascii_letters) for _ in range(10)) 20 | elif data_type == "int": 21 | return random.randint(1, 100) 22 | elif data_type == "float": 23 | return random.uniform(1, 100) 24 | elif data_type == "bool": 25 | return random.choice([True, False]) 26 | elif data_type == "list": 27 | return [random.randint(1, 100) for _ in range(5)] 28 | else: 29 | return {f'key{i}': random.randint(1, 100) for i in range(5)} 30 | 31 | 32 | def main(): 33 | start_total = time.time() 34 | r = RedisDict(namespace="load_test") 35 | operation_times = [] 36 | batched = BATCHING 37 | 38 | if batched: 39 | data = [] 40 | for i in range(OPERATIONS): 41 | key = f"key{i}" 42 | data_type = random.choice(data_types) 43 | value = generate_random_data(data_type) 44 | data.append((key, value)) 45 | 46 | if i % BATCH_SIZE == 0: 47 | with r.pipeline(): 48 | for key, value in data: 49 | start_time = time.time() 50 | r[key] = value 51 | end_time = time.time() 52 | 53 | operation_times.append(end_time - start_time) 54 | 55 | print(f"\r{i}/{OPERATIONS} operations completed", end='') 56 | data = [] 57 | if len(data) > 0: 58 | for key, value in data: 59 | start_time = time.time() 60 | r[key] = value 61 | end_time = time.time() 62 | 63 | operation_times.append(end_time - start_time) 64 | 65 | print() 66 | 67 | else: 68 | for i in range(OPERATIONS): 69 | key = f"key{i}" 70 | data_type = random.choice(data_types) 71 | value = generate_random_data(data_type) 72 | 73 | start_time = time.time() 74 | r[key] = value 75 | _ = r[key] 76 | end_time = time.time() 77 | 78 | operation_times.append(end_time - start_time) 79 | 80 | if i % BATCH_SIZE == 0: 81 | print(f"\r{i}/{OPERATIONS} operations completed", end='') 82 | 83 | print() 84 | r.clear() 85 | 86 | mean_time = statistics.mean(operation_times) 87 | min_time = min(operation_times) 88 | max_time = max(operation_times) 89 | std_dev = statistics.stdev(operation_times) 90 | 91 | # Adding 'noqa' at the end of lines to suppress the E231 warning due to a bug in pylama with Python 3.12 92 | print(f"used batching: {batched}, Total operations: {OPERATIONS}, Batch-size: {BATCH_SIZE}") # noqa: E231 93 | print(f"Mean time: {mean_time:.6f} s") # noqa: E231 94 | print(f"Minimum time: {min_time:.6f} s") # noqa: E231 95 | print(f"Maximum time: {max_time:.6f} s") # noqa: E231 96 | print(f"Standard deviation: {std_dev:.6f} s") # noqa: E231 97 | 98 | end_total = time.time() 99 | total_time = end_total - start_total 100 | print(f"Total time: {total_time:.6f} s") # noqa: E231 101 | 102 | 103 | if __name__ == "__main__": 104 | main() 105 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=68.0", "wheel"] 3 | build-backend = 
"setuptools.build_meta" 4 | 5 | [project] 6 | name = "redis-dict" 7 | version = "3.2.5" 8 | description = "Dictionary with Redis as storage backend" 9 | authors = [ 10 | {name = "Melvin Bijman", email = "bijman.m.m@gmail.com"}, 11 | ] 12 | readme = "README.md" 13 | 14 | requires-python = ">=3.8" 15 | license = {text = "MIT"} 16 | dependencies = [ 17 | "redis>=4.0.0", 18 | ] 19 | classifiers = [ 20 | "Development Status :: 5 - Production/Stable", 21 | "Intended Audience :: Developers", 22 | "Intended Audience :: Information Technology", 23 | "Intended Audience :: Science/Research", 24 | "Topic :: Internet", 25 | "Topic :: Scientific/Engineering", 26 | "Topic :: Database", 27 | "Topic :: System :: Distributed Computing", 28 | "Topic :: Software Development :: Libraries :: Python Modules", 29 | "Topic :: Software Development :: Object Brokering", 30 | "Topic :: Database :: Database Engines/Servers", 31 | "License :: OSI Approved :: MIT License", 32 | "Programming Language :: Python :: 3", 33 | "Programming Language :: Python :: 3.8", 34 | "Programming Language :: Python :: 3.9", 35 | "Programming Language :: Python :: 3.10", 36 | "Programming Language :: Python :: 3.11", 37 | "Programming Language :: Python :: 3.12", 38 | "Typing :: Typed", 39 | ] 40 | 41 | keywords = [ 42 | "redis", "python", "dictionary", "dict", "key-value", 43 | "database", "caching", "distributed-computing", 44 | "dictionary-interface", "large-datasets", 45 | "scientific-computing", "data-persistence", 46 | "high-performance", "scalable", "pipelining", 47 | "batching", "big-data", "data-types", 48 | "distributed-algorithms", "encryption", 49 | "data-management", 50 | ] 51 | 52 | [project.optional-dependencies] 53 | dev = [ 54 | "coverage==5.5", 55 | "hypothesis==6.70.1", 56 | 57 | "mypy>=1.8.0", 58 | "mypy-extensions>=1.0.0", 59 | "types-pyOpenSSL>=24.0.0.0", 60 | "types-redis>=4.6.0", 61 | "typing_extensions>=4.5.0", 62 | 63 | "attrs==22.2.0", 64 | "cffi==1.15.1", 65 | "cryptography==44.0.1", 66 | "exceptiongroup==1.1.1", 67 | "future==0.18.3", 68 | "pycparser==2.21", 69 | "snowballstemmer==2.2.0", 70 | "sortedcontainers==2.4.0", 71 | "tomli==2.0.1", 72 | "setuptools>=68.0.0", 73 | 74 | "bandit", 75 | "pylama>=8.4.1", 76 | "pycodestyle==2.10.0", 77 | "pydocstyle==6.3.0", 78 | "pyflakes==3.0.1", 79 | "mccabe==0.7.0", 80 | "pylint==3.2.7", 81 | "darglint", 82 | "pydocstyle", 83 | 84 | "autopep8", 85 | ] 86 | 87 | docs = [ 88 | "sphinx", 89 | "sphinx-rtd-theme", 90 | "sphinx-autodoc-typehints", 91 | "tomli", 92 | "myst-parser", 93 | ] 94 | 95 | build = [ 96 | "build", 97 | "twine", 98 | ] 99 | 100 | 101 | [tool.setuptools] 102 | package-dir = {"" = "src"} 103 | packages = ["redis_dict"] 104 | 105 | [tool.setuptools.package-data] 106 | redis_dict = ["py.typed"] 107 | 108 | [tool.coverage.run] 109 | source = ["redis_dict"] 110 | branch = true 111 | 112 | [tool.coverage.report] 113 | exclude_lines = [ 114 | "pragma: no cover", 115 | "def __repr__", 116 | "if __name__ == .__main__.:", 117 | "raise NotImplementedError", 118 | "if TYPE_CHECKING:", 119 | ] 120 | show_missing = true 121 | 122 | [tool.mypy] 123 | python_version = "3.8" 124 | strict = true 125 | mypy_path = "src" 126 | files = ["src"] 127 | namespace_packages = true 128 | explicit_package_bases = true 129 | 130 | [tool.pylama] 131 | ignore = "E501,E231" 132 | skip = "*/.tox/*,*/.env/*,build/*" 133 | linters = "pycodestyle,pyflakes,mccabe" 134 | max_line_length = 120 135 | paths = ["src/redis_dict"] 136 | 137 | [tool.autopep8] 138 | max_line_length = 120 139 | 
aggressive = 1 140 | recursive = true 141 | -in-place = true 142 | 143 | [project.urls] 144 | Homepage = "https://github.com/Attumm/redisdict" 145 | Documentation = "https://attumm.github.io/redis-dict/" 146 | Repository = "https://github.com/Attumm/redisdict.git" 147 | Changelog = "https://github.com/Attumm/redisdict/releases" 148 | -------------------------------------------------------------------------------- /tests/fuzzing/tests_hypothesis_basic_types.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from hypothesis import given, strategies as st 4 | 5 | from redis_dict import RedisDict, PythonRedisDict 6 | 7 | 8 | class TestRedisDictWithHypothesis(unittest.TestCase): 9 | """ 10 | A test suite employing Hypothesis for property-based testing of RedisDict. 11 | 12 | This class uses the Hypothesis library to perform fuzz testing on 13 | RedisDict instances. Through the generation of diverse inputs, edge cases, and randomized 14 | scenarios, this test suite aims to evaluate the correctness and resilience of the RedisDict 15 | implementation under various conditions. The goal is to cover a broad spectrum of potential 16 | interactions and behaviors, ensuring the implementation can handle complex and unforeseen 17 | situations. 18 | """ 19 | 20 | def setUp(self): 21 | self.r = RedisDict(namespace="test_with_fuzzing") 22 | 23 | def tearDown(self): 24 | self.r.clear() 25 | 26 | @given(key=st.text(min_size=1), value=st.text()) 27 | def test_set_get_text(self, key, value): 28 | self.r[key] = value 29 | self.assertEqual(self.r[key], value) 30 | 31 | @given(key=st.text(min_size=1), value=st.integers()) 32 | def test_set_get_integer(self, key, value): 33 | self.r[key] = value 34 | self.assertEqual(self.r[key], value) 35 | 36 | @given(key=st.text(min_size=1), value=st.floats(allow_nan=False, allow_infinity=False)) 37 | def test_set_get_float(self, key, value): 38 | self.r[key] = value 39 | self.assertEqual(self.r[key], value) 40 | 41 | @given(key=st.text(min_size=1), value=st.booleans()) 42 | def test_set_get_boolean(self, key, value): 43 | self.r[key] = value 44 | self.assertEqual(self.r[key], value) 45 | 46 | @given(key=st.text(min_size=1), value=st.none()) 47 | def test_set_get_none(self, key, value): 48 | self.r[key] = value 49 | self.assertEqual(self.r[key], value) 50 | 51 | @given(key=st.text(min_size=1), value=st.lists(st.integers())) 52 | def test_set_get_list_of_integers(self, key, value): 53 | self.r[key] = value 54 | self.assertEqual(self.r[key], value) 55 | 56 | @given(key=st.text(min_size=1), value=st.lists(st.text())) 57 | def test_set_get_list_of_text(self, key, value): 58 | self.r[key] = value 59 | self.assertEqual(self.r[key], value) 60 | 61 | @given(key=st.text(min_size=1), value=st.dictionaries(st.text(min_size=1), st.text())) 62 | def test_set_get_dictionary(self, key, value): 63 | self.r[key] = value 64 | self.assertEqual(self.r[key], value) 65 | 66 | @given(key=st.text(min_size=1), value=st.dictionaries(st.text(min_size=1), st.integers())) 67 | def test_set_get_dictionary_with_integer_values(self, key, value): 68 | self.r[key] = value 69 | self.assertEqual(self.r[key], value) 70 | 71 | @given(key=st.text(min_size=1), 72 | value=st.dictionaries(st.text(min_size=1), st.floats(allow_nan=False, allow_infinity=False))) 73 | def test_set_get_dictionary_with_float_values(self, key, value): 74 | self.r[key] = value 75 | self.assertEqual(self.r[key], value) 76 | 77 | @given(key=st.text(min_size=1), 
value=st.dictionaries(st.text(min_size=1), st.lists(st.integers()))) 78 | def test_set_get_dictionary_with_list_values(self, key, value): 79 | self.r[key] = value 80 | self.assertEqual(self.r[key], value) 81 | 82 | @given(key=st.text(min_size=1), 83 | value=st.dictionaries(st.text(min_size=1), st.dictionaries(st.text(min_size=1), st.text()))) 84 | def test_set_get_nested_dictionary(self, key, value): 85 | """ 86 | Test setting and getting a nested dictionary. 87 | """ 88 | self.r[key] = value 89 | self.assertEqual(self.r[key], value) 90 | 91 | @given(key=st.text(min_size=1), value=st.lists(st.lists(st.integers()))) 92 | def test_set_get_nested_list(self, key, value): 93 | """ 94 | Test setting and getting a nested list. 95 | """ 96 | self.r[key] = value 97 | self.assertEqual(self.r[key], value) 98 | 99 | @given(key=st.text(min_size=1), 100 | value=st.tuples(st.integers(), st.text(), st.floats(allow_nan=False, allow_infinity=False), st.booleans())) 101 | def test_set_get_tuple(self, key, value): 102 | """ 103 | Test setting and getting a tuple. 104 | """ 105 | self.r[key] = value 106 | self.assertEqual(self.r[key], value) 107 | 108 | @given(key=st.text(min_size=1), value=st.sets(st.integers())) 109 | def test_set_get_set(self, key, value): 110 | """ 111 | Test setting and getting a set. 112 | """ 113 | self.r[key] = value 114 | self.assertEqual(self.r[key], value) 115 | 116 | 117 | class TestPythonRedisDictWithHypothesis(TestRedisDictWithHypothesis): 118 | """ 119 | A test suite employing Hypothesis for property-based testing of PythonRedisDict. 120 | """ 121 | 122 | def setUp(self): 123 | self.r = PythonRedisDict(namespace="test_with_fuzzing") 124 | 125 | 126 | if __name__ == '__main__': 127 | unittest.main() 128 | -------------------------------------------------------------------------------- /scripts/generate_sphinx_config.py: -------------------------------------------------------------------------------- 1 | import tomli 2 | import os 3 | from pathlib import Path 4 | 5 | 6 | def generate_configs(): 7 | """Generate Sphinx configuration files from pyproject.toml.""" 8 | print("Current working directory:", os.getcwd()) 9 | 10 | root_dir = Path(os.getcwd()) 11 | package_dir = root_dir / 'src' / 'redis_dict' 12 | docs_dir = root_dir / 'docs' 13 | 14 | print(f"Package directory: {package_dir}") 15 | print(f"Docs directory: {docs_dir}") 16 | 17 | with open('pyproject.toml', 'rb') as f: 18 | config = tomli.load(f) 19 | 20 | project_info = config['project'] 21 | 22 | docs_path = Path('docs') 23 | docs_path.mkdir(exist_ok=True) 24 | 25 | source_path = docs_path / 'source' 26 | source_path.mkdir(exist_ok=True) 27 | 28 | module_docs_path = source_path / 'redis_dict' 29 | module_docs_path.mkdir(exist_ok=True) 30 | 31 | docs_path = Path('docs') 32 | docs_path.mkdir(exist_ok=True) 33 | 34 | source_path = docs_path / 'source' 35 | source_path.mkdir(exist_ok=True) 36 | 37 | tutorials_source = docs_path / 'tutorials' 38 | tutorials_source.mkdir(exist_ok=True) 39 | 40 | tutorials_build = source_path / 'tutorials' 41 | tutorials_build.mkdir(exist_ok=True) 42 | 43 | conf_content = f""" 44 | import os 45 | import sys 46 | 47 | # Add the package directory to Python path 48 | package_path = os.path.abspath('{package_dir}') 49 | src_path = os.path.dirname(package_path) 50 | print(f"Adding to path: {{src_path}}") 51 | print(f"Package path: {{package_path}}") 52 | sys.path.insert(0, src_path) 53 | 54 | project = "{project_info['name']}" 55 | copyright = "2024, {project_info['authors'][0]['name']}" 56 | author = 
"{project_info['authors'][0]['name']}" 57 | version = "{project_info['version']}" 58 | 59 | extensions = [ 60 | 'sphinx.ext.autodoc', 61 | 'sphinx.ext.napoleon', 62 | 'sphinx.ext.viewcode', 63 | 'sphinx_autodoc_typehints', 64 | 'myst_parser', 65 | ] 66 | 67 | # Configure autodoc to show the module source 68 | autodoc_default_options = {{ 69 | 'members': True, 70 | 'undoc-members': True, 71 | 'show-inheritance': True, 72 | 'special-members': '__init__', 73 | }} 74 | 75 | myst_update_mathjax = False 76 | myst_enable_extensions = [ 77 | "colon_fence", 78 | "deflist", 79 | ] 80 | myst_heading_anchors = 3 81 | 82 | html_extra_path = ['../tutorials'] 83 | 84 | def setup(app): 85 | print(f"Python path: {{sys.path}}") 86 | 87 | html_sidebars = {{ 88 | '**': [ 89 | 'globaltoc.html', 90 | 'relations.html', 91 | 'sourcelink.html', 92 | 'searchbox.html' 93 | ] 94 | }} 95 | 96 | toc_object_entries = True 97 | toc_object_entries_show_parents = 'domain' 98 | 99 | html_theme = 'sphinx_rtd_theme' 100 | """ 101 | 102 | index_content = """Redis Dict Documentation 103 | ======================== 104 | 105 | .. toctree:: 106 | :maxdepth: 4 107 | :caption: CONTENTS 108 | 109 | modules 110 | readme 111 | 112 | .. include:: ../../README.md 113 | :parser: myst_parser.sphinx_ 114 | 115 | .. toctree:: 116 | :maxdepth: 2 117 | 118 | redis_dict/core 119 | redis_dict/python_dict 120 | redis_dict/type_management 121 | """ 122 | 123 | readme_content = """Overview 124 | ======== 125 | 126 | .. include:: ../../README.md 127 | :parser: myst_parser.sphinx_ 128 | """ 129 | 130 | core_content = """redis_dict.core module 131 | ====================== 132 | 133 | .. automodule:: redis_dict.core 134 | :members: 135 | :undoc-members: 136 | :show-inheritance: 137 | :special-members: __init__ 138 | :noindex: 139 | """ 140 | 141 | 142 | type_management = """Redis Dict Type Management 143 | ============================== 144 | 145 | .. automodule:: redis_dict.type_management 146 | :members: 147 | :undoc-members: 148 | :show-inheritance: 149 | :noindex: 150 | """ 151 | python_redis_dict = """Python Redis Dict 152 | ============================== 153 | 154 | .. 
automodule:: redis_dict.python_dict 155 | :members: 156 | :undoc-members: 157 | :show-inheritance: 158 | :noindex: 159 | """ 160 | 161 | makefile_content = """ 162 | # Minimal makefile for Sphinx documentation 163 | SPHINXOPTS ?= 164 | SPHINXBUILD ?= sphinx-build 165 | SOURCEDIR = source 166 | BUILDDIR = build 167 | 168 | help: 169 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 170 | 171 | .PHONY: help Makefile 172 | 173 | %: Makefile 174 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 175 | """ 176 | 177 | with open(source_path / 'conf.py', 'w') as f: 178 | f.write(conf_content) 179 | 180 | with open(docs_path / 'Makefile', 'w') as f: 181 | f.write(makefile_content) 182 | 183 | with open(source_path / 'index.rst', 'w') as f: 184 | f.write(index_content) 185 | 186 | with open(source_path / 'readme.rst', 'w') as f: 187 | f.write(readme_content) 188 | 189 | with open(module_docs_path / 'core.rst', 'w') as f: 190 | f.write(core_content) 191 | 192 | with open(module_docs_path / 'type_management.rst', 'w') as f: 193 | f.write(type_management) 194 | 195 | with open(module_docs_path / 'python_dict.rst', 'w') as f: 196 | f.write(python_redis_dict) 197 | 198 | if __name__ == '__main__': 199 | generate_configs() -------------------------------------------------------------------------------- /tests/unit/tests_insertion_order_methods.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from redis_dict import PythonRedisDict 4 | 5 | TEST_NAMESPACE_PREFIX = "TEST_NAMESPACE_PREFIX_eojfe" 6 | 7 | class TestRedisDictInsertionOrder(unittest.TestCase): 8 | 9 | @classmethod 10 | def setUpClass(cls): 11 | cls.r = cls.create_redis_dict() 12 | 13 | @classmethod 14 | def tearDownClass(cls): 15 | cls.clear_test_namespace() 16 | 17 | @classmethod 18 | def create_redis_dict(cls, namespace=TEST_NAMESPACE_PREFIX, **kwargs): 19 | return PythonRedisDict(namespace=namespace, **kwargs) 20 | 21 | @classmethod 22 | def clear_test_namespace(cls): 23 | cls.r.redis.flushdb() 24 | 25 | def setUp(self): 26 | self.clear_test_namespace() 27 | self.redis_dict = self.create_redis_dict() 28 | 29 | 30 | def test_insertion_order_empty(self): 31 | result = list(self.redis_dict._insertion_order_iter()) 32 | self.assertEqual([], result) 33 | self.assertFalse(self.redis_dict._insertion_order_len()) 34 | self.assertIsNone(self.redis_dict._insertion_order_latest()) 35 | 36 | def test_insertion_order_add_single(self): 37 | self.assertTrue(self.redis_dict._insertion_order_add("foo")) 38 | 39 | items = list(self.redis_dict._insertion_order_iter()) 40 | self.assertEqual(1, len(items)) 41 | self.assertEqual("foo", items[0]) 42 | 43 | self.assertTrue(self.redis_dict._insertion_order_len()) 44 | self.assertEqual("foo", self.redis_dict._insertion_order_latest()) 45 | 46 | def test_insertion_order_delete_single(self): 47 | self.redis_dict._insertion_order_add("foo") 48 | self.assertTrue(self.redis_dict._insertion_order_delete("foo")) 49 | 50 | items = list(self.redis_dict._insertion_order_iter()) 51 | self.assertEqual(0, len(items)) 52 | self.assertFalse(self.redis_dict._insertion_order_len()) 53 | self.assertIsNone(self.redis_dict._insertion_order_latest()) 54 | 55 | def test_insertion_order_multiple_items(self): 56 | self.redis_dict._insertion_order_add("foo1") 57 | self.redis_dict._insertion_order_add("foo2") 58 | 59 | items = list(self.redis_dict._insertion_order_iter()) 60 | self.assertEqual(2, len(items)) 61 | 
self.assertTrue(self.redis_dict._insertion_order_len()) 62 | self.assertEqual("foo2", self.redis_dict._insertion_order_latest()) 63 | 64 | def test_insertion_order_clear(self): 65 | self.redis_dict._insertion_order_add("foo1") 66 | self.redis_dict._insertion_order_add("foo2") 67 | 68 | self.assertTrue(self.redis_dict._insertion_order_clear()) 69 | 70 | items = list(self.redis_dict._insertion_order_iter()) 71 | self.assertEqual(0, len(items)) 72 | self.assertFalse(self.redis_dict._insertion_order_len()) 73 | self.assertIsNone(self.redis_dict._insertion_order_latest()) 74 | 75 | def test_insertion_order_add_empty_string(self): 76 | self.assertTrue(self.redis_dict._insertion_order_add("")) 77 | self.assertEqual("", self.redis_dict._insertion_order_latest()) 78 | self.assertTrue(self.redis_dict._insertion_order_len()) 79 | 80 | def test_insertion_order_add_duplicate(self): 81 | self.assertTrue(self.redis_dict._insertion_order_add("foo")) 82 | self.assertFalse(self.redis_dict._insertion_order_add("foo")) 83 | items = list(self.redis_dict._insertion_order_iter()) 84 | self.assertEqual(1, len(items)) 85 | 86 | def test_insertion_order_delete_nonexistent(self): 87 | self.assertFalse(self.redis_dict._insertion_order_delete("nonexistent")) 88 | 89 | def test_insertion_order_delete_empty_string(self): 90 | self.redis_dict._insertion_order_add("") 91 | self.assertTrue(self.redis_dict._insertion_order_delete("")) 92 | self.assertIsNone(self.redis_dict._insertion_order_latest()) 93 | 94 | def test_insertion_order_iter_100(self): 95 | expected, expected_items = "", 100 96 | for i in range(expected_items): 97 | expected = f"foo{i}" 98 | self.redis_dict._insertion_order_add(expected) 99 | items = list(self.redis_dict._insertion_order_iter()) 100 | self.assertEqual(expected_items, len(items)) 101 | self.assertEqual(expected_items, self.redis_dict._insertion_order_len()) 102 | 103 | def test_insertion_order_iter_10000(self): 104 | expected, expected_items = "", 10000 105 | with self.redis_dict.pipeline(): 106 | for i in range(expected_items): 107 | expected = f"foo{i}" 108 | self.redis_dict._insertion_order_add(expected) 109 | 110 | items = list(self.redis_dict._insertion_order_iter()) 111 | self.assertEqual(expected_items, len(items)) 112 | self.assertEqual(self.redis_dict._insertion_order_latest(), expected) 113 | self.assertEqual(expected_items, self.redis_dict._insertion_order_len()) 114 | 115 | def test_insertion_order_latest_after_delete_last(self): 116 | self.redis_dict._insertion_order_add("foo1") 117 | self.redis_dict._insertion_order_add("foo2") 118 | self.redis_dict._insertion_order_delete("foo2") 119 | self.assertEqual("foo1", self.redis_dict._insertion_order_latest()) 120 | 121 | if __name__ == "__main__": 122 | unittest.main() 123 | -------------------------------------------------------------------------------- /tests/load/load_test_compression.py: -------------------------------------------------------------------------------- 1 | import time 2 | import statistics 3 | 4 | from redis_dict import RedisDict 5 | import json 6 | 7 | # Constants 8 | BATCH_SIZE = 1000 9 | 10 | 11 | import os 12 | import csv 13 | import zipfile 14 | import requests 15 | from typing import Iterator, Dict 16 | from io import TextIOWrapper 17 | import gzip 18 | import base64 19 | 20 | class GzippedDict: 21 | """ 22 | A class that can encode its attributes to a compressed string and decode from a compressed string, 23 | optimized for the fastest possible gzipping. 
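    Note (illustrative, inferred from the decode implementation below): decode() rebuilds the
    object via cls(**attributes), so a subclass is expected to accept its attributes as
    keyword arguments, for example:

        class Person(GzippedDict):
            def __init__(self, name, age):
                self.name = name
                self.age = age

        person = Person.decode(Person("John", 32).encode())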
24 | 25 | Methods: 26 | encode: Compresses and encodes the object's attributes to a base64 string using the fastest settings. 27 | decode: Creates a new object from a compressed and encoded base64 string. 28 | """ 29 | 30 | def encode(self) -> str: 31 | """ 32 | Encodes the object's attributes to a compressed base64 string using the fastest possible settings. 33 | 34 | Returns: 35 | str: A base64 encoded string of the compressed object attributes. 36 | """ 37 | json_data = json.dumps(self.__dict__, separators=(',', ':')) 38 | compressed_data = gzip.compress(json_data.encode('utf-8'), compresslevel=1) 39 | return base64.b64encode(compressed_data).decode('ascii') 40 | 41 | @classmethod 42 | def decode(cls, encoded_str: str) -> 'GzippedDict': 43 | """ 44 | Creates a new object from a compressed and encoded base64 string. 45 | 46 | Args: 47 | encoded_str (str): A base64 encoded string of compressed object attributes. 48 | 49 | Returns: 50 | GzippedDict: A new instance of the class with decoded attributes. 51 | """ 52 | json_data = gzip.decompress(base64.b64decode(encoded_str)).decode('utf-8') 53 | attributes = json.loads(json_data) 54 | return cls(**attributes) 55 | 56 | 57 | def encode_dict(dic: dict) -> str: 58 | json_data = json.dumps(dic, separators=(',', ':')) 59 | compressed_data = gzip.compress(json_data.encode('utf-8'), compresslevel=1) 60 | return str(base64.b64encode(compressed_data).decode('ascii')) 61 | 62 | 63 | def decode_dict(s) -> dict: 64 | return json.loads(gzip.decompress(base64.b64decode(s)).decode('utf-8')) 65 | 66 | import binascii 67 | 68 | def encode_dict(dic: dict) -> str: 69 | json_data = json.dumps(dic, separators=(',', ':')) 70 | compressed_data = gzip.compress(json_data.encode('utf-8'), compresslevel=1) 71 | return binascii.hexlify(compressed_data).decode('ascii') 72 | 73 | def decode_dict(s: str) -> dict: 74 | compressed_data = binascii.unhexlify(s) 75 | return json.loads(gzip.decompress(compressed_data).decode('utf-8')) 76 | 77 | 78 | import os 79 | import zipfile 80 | import gzip 81 | import csv 82 | from typing import Iterator, Dict 83 | from io import TextIOWrapper 84 | import requests 85 | from urllib.parse import urlparse 86 | 87 | def download_file(url: str, filename: str): 88 | response = requests.get(url) 89 | with open(filename, 'wb') as f: 90 | f.write(response.content) 91 | 92 | def csv_iterator(file) -> Iterator[Dict[str, str]]: 93 | reader = csv.DictReader(file) 94 | for row in reader: 95 | yield row 96 | 97 | def get_filename_from_url(url: str) -> str: 98 | return os.path.basename(urlparse(url).path) 99 | 100 | def create_data_gen(url: str) -> Iterator[Dict[str, str]]: 101 | filename = get_filename_from_url(url) 102 | print(filename) 103 | if not os.path.exists(filename): 104 | download_file(url, filename) 105 | 106 | if filename.endswith('.zip'): 107 | with zipfile.ZipFile(filename, 'r') as zip_ref: 108 | csv_filename = zip_ref.namelist()[0] 109 | with zip_ref.open(csv_filename) as csv_file: 110 | text_file = TextIOWrapper(csv_file, encoding='utf-8') 111 | yield from csv_iterator(text_file) 112 | elif filename.endswith('.gz'): 113 | with gzip.open(filename, 'rt', encoding='utf-8') as gz_file: 114 | yield from csv_iterator(gz_file) 115 | else: 116 | raise ValueError("Unsupported file format. 
Use .zip or .gz files.") 117 | 118 | 119 | def run_load_test(dataset, times=1, use_compression=False): 120 | redis_dict = RedisDict() 121 | redis_dict.clear() 122 | initial_size = redis_dict.redis.info(section="memory")["used_memory"] 123 | if use_compression: 124 | redis_dict.extends_type(dict, encode_dict, decode_dict) 125 | 126 | 127 | operation_times = [] 128 | start_total = time.time() 129 | 130 | total_operations = 0 131 | 132 | for _ in range(times): 133 | key = "bla" 134 | for i, value in enumerate(create_data_gen(dataset), 1): 135 | #key = f"key{i}" 136 | #print(value) 137 | start_time = time.time() 138 | redis_dict[key] = value 139 | _ = redis_dict[key] 140 | end_time = time.time() 141 | 142 | operation_times.append(end_time - start_time) 143 | 144 | total_operations += i 145 | 146 | print(f"\nTotal operations completed: {total_operations}") 147 | 148 | end_total = time.time() 149 | total_time = end_total - start_total 150 | 151 | final_size = redis_dict.redis.info(section="memory")["used_memory"] 152 | redis_dict.clear() 153 | 154 | return { 155 | "dataset": dataset, 156 | "compression": use_compression, 157 | "total_operations": total_operations, 158 | "batch_size": BATCH_SIZE, 159 | "mean_time": statistics.mean(operation_times) if operation_times else None, 160 | "min_time": min(operation_times) if operation_times else None, 161 | "max_time": max(operation_times) if operation_times else None, 162 | "std_dev": statistics.stdev(operation_times) if len(operation_times) > 1 else None, 163 | "total_time": total_time, 164 | "initial_size": human_readable_size(initial_size), 165 | "final_size": human_readable_size(final_size), 166 | "size_difference": human_readable_size(final_size - initial_size), 167 | } 168 | 169 | def format_value(value): 170 | if isinstance(value, bool): 171 | return "With" if value else "Without" 172 | elif isinstance(value, float): 173 | return f"{value:.6f}" 174 | return str(value) 175 | 176 | 177 | def human_readable_size(size_in_bytes): 178 | for unit in ['B', 'KB', 'MB', 'GB', 'TB']: 179 | if size_in_bytes < 1024.0: 180 | return f"{size_in_bytes:.2f} {unit}" 181 | size_in_bytes /= 1024.0 182 | return f"{size_in_bytes:.2f} PB" 183 | 184 | 185 | def display_results(results, sort_key="mean_time", reverse=False): 186 | if not results: 187 | print("No results to display.") 188 | return 189 | 190 | sorted_results = sorted(results, key=lambda x: x[sort_key], reverse=reverse) 191 | 192 | keys = list(sorted_results[0].keys()) 193 | 194 | headers = [key.replace("_", " ").capitalize() for key in keys] 195 | 196 | col_widths = [max(len(header), max(len(format_value(result[key])) for result in sorted_results)) for header, key in zip(headers, keys)] 197 | 198 | header = " | ".join(header.ljust(width) for header, width in zip(headers, col_widths)) 199 | print(header) 200 | print("-" * len(header)) 201 | 202 | # Print each result row 203 | for result in sorted_results: 204 | row = [format_value(result[key]).ljust(width) for key, width in zip(keys, col_widths)] 205 | print(" | ".join(row)) 206 | 207 | 208 | if __name__ == "__main__": 209 | times = 1 210 | results = [] 211 | datasets = [ 212 | "https://www.briandunning.com/sample-data/us-500.zip", 213 | #"https://datasets.imdbws.com/name.basics.tsv.gz", 214 | "https://datasets.imdbws.com/title.basics.tsv.gz" 215 | ] 216 | for dataset in datasets: 217 | print("Running load test without compression...") 218 | results.append(run_load_test(times=times, use_compression=False, dataset=dataset)) 219 | print("\nRunning load test 
with compression...") 220 | results.append(run_load_test(times=times, use_compression=True, dataset=dataset)) 221 | 222 | print("\nPerformance Comparison (sorted by Mean Time):") 223 | display_results(results) -------------------------------------------------------------------------------- /tests/unit/tests_standard_types.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import unittest 3 | 4 | from uuid import UUID 5 | from pathlib import Path 6 | from decimal import Decimal 7 | from datetime import datetime, date, time, timedelta, timezone 8 | from collections import OrderedDict, defaultdict 9 | 10 | from redis_dict import RedisDict 11 | 12 | sys.path.append(str(Path(__file__).parent.parent.parent / "src")) 13 | from redis_dict.type_management import _default_decoder 14 | 15 | 16 | class TypeCodecTests(unittest.TestCase): 17 | def setUp(self): 18 | self.dic = RedisDict() 19 | 20 | def _assert_value_encodes_decodes(self, expected_value): 21 | """Helper method to test encoding and decoding of a value""" 22 | expected_type = type(expected_value).__name__ 23 | encoded_value = self.dic.encoding_registry.get(expected_type, str)(expected_value) 24 | 25 | self.assertIsInstance(encoded_value, str) 26 | 27 | result = self.dic.decoding_registry.get(expected_type, _default_decoder)(encoded_value) 28 | 29 | self.assertEqual(type(result).__name__, expected_type) 30 | self.assertEqual(expected_value, result) 31 | 32 | def _ensure_testcases_have_all_types(self, test_cases): 33 | """ 34 | Ensure the testcases tests all the current standard types. 35 | """ 36 | test_types = {i[1] for i in test_cases} 37 | registry_types = set(self.dic.decoding_registry.keys()) 38 | 39 | missing_types = registry_types - test_types 40 | 41 | extra_types = test_types - registry_types 42 | len_test_types = len(test_types) 43 | len_registry_types = len(self.dic.decoding_registry.keys()) 44 | self.assertEqual( 45 | len_test_types, 46 | len_registry_types, 47 | f"\nMissing types in tests: {missing_types if missing_types else 'no missing'}" 48 | f"\nExtra types in tests: {extra_types if extra_types else 'None'}" 49 | f"\nThere are types {len_test_types} and {len_registry_types}" 50 | f"\nthere are still {len_registry_types - len_test_types} missing types" 51 | ) 52 | 53 | def test_happy_path(self): 54 | test_cases = [ 55 | ("Hello World", "str"), 56 | (42, "int"), 57 | (3.14, "float"), 58 | (True, "bool"), 59 | (None, "NoneType"), 60 | 61 | ([1, 2, 3], "list"), 62 | ({"a": 1, "b": 2}, "dict"), 63 | ((1, 2, 3), "tuple"), 64 | ({1, 2, 3}, "set"), 65 | 66 | (datetime(2024, 1, 1, 12, 30, 45), "datetime"), 67 | (date(2024, 1, 1), "date"), 68 | (time(12, 30, 45), "time"), 69 | (timedelta(days=1, hours=2), "timedelta"), 70 | 71 | (Decimal("3.14159"), "Decimal"), 72 | (complex(1, 2), "complex"), 73 | (bytes([72, 101, 108, 108, 111]), "bytes"), 74 | (UUID('12345678-1234-5678-1234-567812345678'), "UUID"), 75 | 76 | (OrderedDict([('a', 1), ('b', 2)]), "OrderedDict"), 77 | (defaultdict(type(None), {'a': 1, 'b': 2}), "defaultdict"), 78 | (frozenset([1, 2, 3]), "frozenset"), 79 | ] 80 | self._ensure_testcases_have_all_types(test_cases) 81 | 82 | for value, type_name in test_cases: 83 | with self.subTest(f"Testing happy path: {type_name}"): 84 | self._assert_value_encodes_decodes(value) 85 | 86 | def test_min_boundary_values(self): 87 | test_cases = [ 88 | ("", "str"), 89 | (0, "int"), 90 | (0.0, "float"), 91 | (False, "bool"), 92 | (None, "NoneType"), 93 | 94 | ([], "list"), 95 | ({}, 
"dict"), 96 | ((), "tuple"), 97 | (set(), "set"), 98 | 99 | (datetime(1970, 1, 1, 0, 0, 0), "datetime"), 100 | (date(1970, 1, 1), "date"), 101 | (time(0, 0, 0), "time"), 102 | (timedelta(0), "timedelta"), 103 | 104 | (Decimal("0"), "Decimal"), 105 | (complex(0, 0), "complex"), 106 | (bytes(), "bytes"), 107 | (UUID('00000000-0000-0000-0000-000000000000'), "UUID"), 108 | (OrderedDict(), "OrderedDict"), 109 | (defaultdict(type(None)), "defaultdict"), 110 | (frozenset(), "frozenset") 111 | ] 112 | self._ensure_testcases_have_all_types(test_cases) 113 | 114 | for value, type_name in test_cases: 115 | with self.subTest(f"Testing min boundary value {type_name}"): 116 | self._assert_value_encodes_decodes(value) 117 | 118 | def test_max_boundary_values(self): 119 | test_cases = [ 120 | ("א" * 10000, "str"), 121 | (sys.maxsize, "int"), 122 | (float('inf'), "float"), 123 | (True, "bool"), 124 | (None, "NoneType"), 125 | 126 | ([1] * 1000, "list"), 127 | ({"k" + str(i): i for i in range(1000)}, "dict"), 128 | (tuple(range(1000)), "tuple"), 129 | (set(range(1000)), "set"), 130 | 131 | (datetime(9999, 12, 31, 23, 59, 59, 999999), "datetime"), 132 | (date(9999, 12, 31), "date"), 133 | (time(23, 59, 59, 999999), "time"), 134 | (timedelta(days=999999999), "timedelta"), 135 | 136 | (Decimal('1E+308'), "Decimal"), 137 | (complex(float('inf'), float('inf')), "complex"), 138 | (bytes([255] * 1000), "bytes"), 139 | (UUID('ffffffff-ffff-ffff-ffff-ffffffffffff'), "UUID"), 140 | (OrderedDict([(str(i), i) for i in range(1000)]), "OrderedDict"), 141 | (defaultdict(type(None), {str(i): i for i in range(1000)}), "defaultdict"), 142 | (frozenset(range(1000)), "frozenset") 143 | ] 144 | self._ensure_testcases_have_all_types(test_cases) 145 | 146 | for value, type_name in test_cases: 147 | with self.subTest(f"Testing max boundary value {type_name}"): 148 | self._assert_value_encodes_decodes(value) 149 | 150 | def test_datetime_edge_cases(self): 151 | test_cases = [ 152 | (date(2024, 1, 1), "start of year date"), 153 | (date(2024, 12, 31), "end of year date"), 154 | (date(2024, 2, 29), "leap year date"), 155 | 156 | (time(0, 0, 0), "midnight"), 157 | (time(12, 0, 0), "noon"), 158 | (time(23, 59, 59, 999999), "just before midnight"), 159 | (time(12, 0, 0, tzinfo=timezone.utc), "noon with timezone"), 160 | 161 | (timedelta(days=1), "one day"), 162 | (timedelta(weeks=1), "one week"), 163 | (timedelta(hours=24), "24 hours"), 164 | (timedelta(milliseconds=1), "one millisecond"), 165 | (timedelta(microseconds=1), "one microsecond"), 166 | (timedelta(days=1, hours=1, minutes=1, seconds=1), "mixed time units"), 167 | 168 | (datetime(2024, 1, 1, 0, 0, 0), "start of year"), 169 | (datetime(2024, 12, 31, 23, 59, 59, 999999), "end of year"), 170 | (datetime(2024, 2, 29, 0, 0, 0), "leap year"), 171 | (datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc), "with timezone"), 172 | 173 | (datetime(2024, 2, 28, 23, 59, 59), "day before leap day"), 174 | (datetime(2024, 3, 1, 0, 0, 0), "day after leap day"), 175 | 176 | (datetime(2024, 2, 29, 0, 0, 0), "leap year divisible by 4"), 177 | (datetime(2000, 2, 29, 0, 0, 0), "leap year divisible by 100 and 400"), 178 | (datetime(1900, 2, 28, 0, 0, 0), "non leap year divisible by 100"), 179 | (datetime(2100, 2, 28, 0, 0, 0), "future non leap year divisible by 100"), 180 | 181 | (date(2024, 2, 29), "leap year date divisible by 4"), 182 | (date(2000, 2, 29), "leap year date divisible by 100 and 400"), 183 | (date(1900, 2, 28), "non leap year date divisible by 100"), 184 | (date(2100, 2, 28), "future 
non leap year date divisible by 100"), 185 | ] 186 | 187 | for value, test_name in test_cases: 188 | with self.subTest(f"Testing datetime edge case {test_name}"): 189 | self._assert_value_encodes_decodes(value) 190 | 191 | if __name__ == '__main__': 192 | unittest.main() 193 | -------------------------------------------------------------------------------- /src/redis_dict/type_management.py: -------------------------------------------------------------------------------- 1 | """Type management module.""" 2 | 3 | import json 4 | import base64 5 | from collections import OrderedDict, defaultdict 6 | from datetime import datetime, date, time, timedelta 7 | 8 | from typing import Callable, Any, Dict, Tuple, Set 9 | 10 | from uuid import UUID 11 | from decimal import Decimal 12 | 13 | 14 | SENTINEL = object() 15 | 16 | EncodeFuncType = Callable[[Any], str] 17 | DecodeFuncType = Callable[[str], Any] 18 | EncodeType = Dict[str, EncodeFuncType] 19 | DecodeType = Dict[str, DecodeFuncType] 20 | 21 | 22 | def _create_default_encode(custom_encode_method: str) -> EncodeFuncType: 23 | def default_encode(obj: Any) -> str: 24 | return getattr(obj, custom_encode_method)() # type: ignore[no-any-return] 25 | return default_encode 26 | 27 | 28 | def _create_default_decode(cls: type, custom_decode_method: str) -> DecodeFuncType: 29 | def default_decode(encoded_str: str) -> Any: 30 | return getattr(cls, custom_decode_method)(encoded_str) 31 | return default_decode 32 | 33 | 34 | def _decode_tuple(val: str) -> Tuple[Any, ...]: 35 | """ 36 | Deserialize a JSON-formatted string to a tuple. 37 | 38 | This function takes a JSON-formatted string, deserializes it to a list, and 39 | then converts the list to a tuple. 40 | 41 | Args: 42 | val (str): A JSON-formatted string representing a list. 43 | 44 | Returns: 45 | Tuple[Any, ...]: A tuple with the deserialized values from the input string. 46 | """ 47 | return tuple(json.loads(val)) 48 | 49 | 50 | def _encode_tuple(val: Tuple[Any, ...]) -> str: 51 | """ 52 | Serialize a tuple to a JSON-formatted string. 53 | 54 | This function takes a tuple, converts it to a list, and then serializes 55 | the list to a JSON-formatted string. 56 | 57 | Args: 58 | val (Tuple[Any, ...]): A tuple with values to be serialized. 59 | 60 | Returns: 61 | str: A JSON-formatted string representing the input tuple. 62 | """ 63 | return json.dumps(list(val)) 64 | 65 | 66 | def _decode_set(val: str) -> Set[Any]: 67 | """ 68 | Deserialize a JSON-formatted string to a set. 69 | 70 | This function takes a JSON-formatted string, deserializes it to a list, and 71 | then converts the list to a set. 72 | 73 | Args: 74 | val (str): A JSON-formatted string representing a list. 75 | 76 | Returns: 77 | set[Any]: A set with the deserialized values from the input string. 78 | """ 79 | return set(json.loads(val)) 80 | 81 | 82 | def _encode_set(val: Set[Any]) -> str: 83 | """ 84 | Serialize a set to a JSON-formatted string. 85 | 86 | This function takes a set, converts it to a list, and then serializes the 87 | list to a JSON-formatted string. 88 | 89 | Args: 90 | val (set[Any]): A set with values to be serialized. 91 | 92 | Returns: 93 | str: A JSON-formatted string representing the input set. 
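Note: set elements have no defined order, so the order of items in the resulting JSON list is arbitrary.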
94 | """ 95 | return json.dumps(list(val)) 96 | 97 | 98 | decoding_registry: DecodeType = { 99 | type('').__name__: str, 100 | type(1).__name__: int, 101 | type(0.1).__name__: float, 102 | type(True).__name__: lambda x: x == "True", 103 | type(None).__name__: lambda x: None, 104 | 105 | "list": json.loads, 106 | "dict": json.loads, 107 | "tuple": _decode_tuple, 108 | type(set()).__name__: _decode_set, 109 | 110 | datetime.__name__: datetime.fromisoformat, 111 | date.__name__: date.fromisoformat, 112 | time.__name__: time.fromisoformat, 113 | timedelta.__name__: lambda x: timedelta(seconds=float(x)), 114 | 115 | Decimal.__name__: Decimal, 116 | complex.__name__: lambda x: complex(*map(float, x.split(','))), 117 | bytes.__name__: base64.b64decode, 118 | 119 | UUID.__name__: UUID, 120 | OrderedDict.__name__: lambda x: OrderedDict(json.loads(x)), 121 | defaultdict.__name__: lambda x: defaultdict(type(None), json.loads(x)), 122 | frozenset.__name__: lambda x: frozenset(json.loads(x)), 123 | } 124 | 125 | 126 | encoding_registry: EncodeType = { 127 | "list": json.dumps, 128 | "dict": json.dumps, 129 | "tuple": _encode_tuple, 130 | type(set()).__name__: _encode_set, 131 | 132 | datetime.__name__: datetime.isoformat, 133 | date.__name__: date.isoformat, 134 | time.__name__: time.isoformat, 135 | timedelta.__name__: lambda x: str(x.total_seconds()), 136 | 137 | complex.__name__: lambda x: f"{x.real},{x.imag}", 138 | bytes.__name__: lambda x: base64.b64encode(x).decode('ascii'), 139 | OrderedDict.__name__: lambda x: json.dumps(list(x.items())), 140 | defaultdict.__name__: lambda x: json.dumps(dict(x)), 141 | frozenset.__name__: lambda x: json.dumps(list(x)), 142 | } 143 | 144 | 145 | class RedisDictJSONEncoder(json.JSONEncoder): 146 | """ 147 | Extends JSON encoding capabilities by reusing RedisDict type conversion. 148 | 149 | Uses existing decoding_registry to know which types to handle specially and 150 | encoding_registry (falls back to str) for converting to JSON-compatible formats. 151 | 152 | Example: 153 | The encoded format looks like:: 154 | 155 | { 156 | "__type__": "TypeName", 157 | "value": 158 | } 159 | 160 | Notes: 161 | Uses decoding_registry (containing all supported types) to check if type 162 | needs special handling. For encoding, defaults to str() if no encoder exists 163 | in encoding_registry. 164 | """ 165 | 166 | def default(self, o: Any) -> Any: 167 | """Overwrite default from json encoder. 168 | 169 | Args: 170 | o (Any): Object to be serialized. 171 | 172 | Raises: 173 | TypeError: If the object `o` cannot be serialized. 174 | 175 | Returns: 176 | Any: Serialized value. 177 | """ 178 | type_name = type(o).__name__ 179 | if type_name in decoding_registry: 180 | return { 181 | "__type__": type_name, 182 | "value": encoding_registry.get(type_name, _default_encoder)(o) 183 | } 184 | try: 185 | return json.JSONEncoder.default(self, o) 186 | except TypeError as e: 187 | raise TypeError(f"Object of type {type_name} is not JSON serializable") from e 188 | 189 | 190 | class RedisDictJSONDecoder(json.JSONDecoder): 191 | """ 192 | JSON decoder leveraging RedisDict existing type conversion system. 193 | 194 | Works with RedisDictJSONEncoder to reconstruct Python objects from JSON using 195 | RedisDict decoding_registry. 196 | 197 | Still needs work but allows for more types than without. 198 | """ 199 | 200 | def __init__(self, *args: Any, **kwargs: Any) -> None: 201 | """ 202 | Overwrite the __init__ method from JSON decoder. 
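Installs an object hook that reconstructs any mapping of the form {"__type__": ..., "value": ...} by looking up the type name in the decoding registry; other mappings are returned unchanged.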
203 | 204 | Args: 205 | *args (Any): Positional arguments for initialization. 206 | **kwargs (Any): Keyword arguments for initialization. 207 | 208 | """ 209 | def _object_hook(obj: Dict[Any, Any]) -> Any: 210 | if "__type__" in obj and "value" in obj: 211 | type_name = obj["__type__"] 212 | if type_name in decoding_registry: 213 | return decoding_registry[type_name](obj["value"]) 214 | return obj 215 | 216 | super().__init__(object_hook=_object_hook, *args, **kwargs) 217 | 218 | 219 | def encode_json(obj: Any) -> str: 220 | """ 221 | Encode a Python object to a JSON string using the existing encoding registry. 222 | 223 | Args: 224 | obj (Any): The Python object to be encoded. 225 | 226 | Returns: 227 | str: The JSON-encoded string representation of the object. 228 | """ 229 | return json.dumps(obj, cls=RedisDictJSONEncoder) 230 | 231 | 232 | def decode_json(s: str) -> Any: 233 | """ 234 | Decode a JSON string to a Python object using the existing decoding registry. 235 | 236 | Args: 237 | s (str): The JSON string to be decoded. 238 | 239 | Returns: 240 | Any: The decoded Python object. 241 | """ 242 | return json.loads(s, cls=RedisDictJSONDecoder) 243 | 244 | 245 | def _default_decoder(x: str) -> str: 246 | """ 247 | Pass-through decoder that returns the input string unchanged. 248 | 249 | Args: 250 | x (str): The input string. 251 | 252 | Returns: 253 | str: The same input string. 254 | """ 255 | return x 256 | 257 | 258 | def _default_encoder(x: Any) -> str: 259 | """ 260 | Take x and returns the result str of the object. 261 | 262 | Args: 263 | x (Any): The input object 264 | 265 | Returns: 266 | str: output of str of the object 267 | """ 268 | return str(x) 269 | 270 | 271 | encoding_registry["dict"] = encode_json 272 | decoding_registry["dict"] = decode_json 273 | 274 | 275 | encoding_registry["list"] = encode_json 276 | decoding_registry["list"] = decode_json 277 | -------------------------------------------------------------------------------- /src/redis_dict/python_dict.py: -------------------------------------------------------------------------------- 1 | """Python Redis Dict module.""" 2 | from typing import Any, Iterator, Tuple, Union, Optional, List, Dict 3 | 4 | import time 5 | from datetime import timedelta 6 | 7 | from redis import StrictRedis 8 | 9 | from .core import RedisDict 10 | 11 | 12 | class PythonRedisDict(RedisDict): 13 | """Python dictionary with Redis as backend. 14 | 15 | With support for advanced features, such as custom data types, pipelining, and key expiration. 16 | 17 | This class focuses on having one-to-on behavior of a dictionary while using Redis as storage layer, allowing 18 | for efficient storage and retrieval of key-value pairs. It supports various data types, including 19 | strings, integers, floats, lists, dictionaries, tuples, sets, and user-defined types. The class 20 | leverages the power of Redis pipelining to minimize network round-trip time, latency, and I/O load, 21 | thereby optimizing performance for batch operations. Additionally, it allows for the management of 22 | key expiration through the use of context managers. 23 | 24 | The RedisDict class is designed to be analogous to a standard Python dictionary while providing 25 | enhanced functionality, such as support for a wider range of data types and efficient batch operations. 26 | It aims to offer a seamless and familiar interface for developers familiar with Python dictionaries, 27 | enabling a smooth transition to a Redis-backed data store. 
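Insertion order is tracked in a per-namespace Redis sorted set, which backs ordered iteration, len(), and popitem().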
28 | 29 | Extendable Types: You can extend RedisDict by adding or overriding encoding and decoding functions. 30 | This functionality enables various use cases, such as managing encrypted data in Redis, 31 | To implement this, simply create and register your custom encoding and decoding functions. 32 | By delegating serialization to redis-dict, reduce complexity and have simple code in the codebase. 33 | """ 34 | 35 | def __init__(self, 36 | namespace: str = 'main', 37 | expire: Union[int, timedelta, None] = None, 38 | preserve_expiration: Optional[bool] = False, 39 | redis: "Optional[StrictRedis[Any]]" = None, 40 | **redis_kwargs: Any) -> None: # noqa: D202 pydocstyle clashes with Sphinx 41 | """ 42 | Initialize a RedisDict instance. 43 | 44 | Init the RedisDict instance. 45 | 46 | Args: 47 | namespace (str): A prefix for keys stored in Redis. 48 | expire (Union[int, timedelta, None], optional): Expiration time for keys. 49 | preserve_expiration (Optional[bool], optional): Preserve expiration on key updates. 50 | redis (Optional[StrictRedis[Any]], optional): A Redis connection instance. 51 | **redis_kwargs (Any): Additional kwargs for Redis connection if not provided. 52 | """ 53 | super().__init__( 54 | namespace=namespace, 55 | expire=expire, 56 | preserve_expiration=preserve_expiration, 57 | redis=redis, 58 | raise_key_error_delete=True, 59 | **redis_kwargs 60 | ) 61 | self._insertion_order_key = f"redis-dict-insertion-order-{namespace}" 62 | 63 | def __delitem__(self, key: str) -> None: 64 | """ 65 | Delete the value associated with the given key, analogous to a dictionary. 66 | 67 | For distributed systems, we intentionally don't raise KeyError when the key doesn't exist. 68 | This ensures identical code running across different systems won't randomly fail 69 | when another system already achieved the deletion goal (key not existing). 70 | 71 | Warning: 72 | Setting dict_compliant=True will raise KeyError when key doesn't exist. 73 | This is not recommended for distributed systems as it can cause KeyErrors 74 | that are hard to debug when multiple systems interact with the same keys. 75 | 76 | Args: 77 | key (str): The key to delete 78 | 79 | Raises: 80 | KeyError: Only if dict_compliant=True and key doesn't exist 81 | """ 82 | formatted_key = self._format_key(key) 83 | 84 | result = self.redis.delete(formatted_key) 85 | self._insertion_order_delete(formatted_key) 86 | if not result: 87 | raise KeyError(key) 88 | 89 | def _store(self, key: str, value: Any) -> None: 90 | """ 91 | Store a value in Redis with the given key. 92 | 93 | Args: 94 | key (str): The key to store the value. 95 | value (Any): The value to be stored. 96 | 97 | Raises: 98 | ValueError: If the value or key fail validation. 99 | 100 | Note: Validity checks could be refactored to allow for custom exceptions that inherit from ValueError, 101 | providing detailed information about why a specific validation failed. 102 | This would enable users to specify which validity checks should be executed, add custom validity functions, 103 | and choose whether to fail on validation errors, or drop the data and only issue a warning and continue. 104 | Example use case is caching, to cache data only when it's between min and max sizes. 105 | Allowing for simple dict set operation, but only cache data that makes sense. 
106 | 107 | """ 108 | if not self._valid_input(value) or not self._valid_input(key): 109 | raise ValueError("Invalid input value or key size exceeded the maximum limit.") 110 | 111 | formatted_key = self._format_key(key) 112 | formatted_value = self._format_value(value) 113 | 114 | with self.pipeline(): 115 | self._insertion_order_add(formatted_key) 116 | self._store_set(formatted_key, formatted_value) 117 | 118 | def setdefault(self, key: str, default_value: Optional[Any] = None) -> Any: 119 | """Get value under key, and if not present set default value. 120 | 121 | Return the value associated with the given key if it exists, otherwise set the value to the 122 | default value and return it. Analogous to a dictionary's setdefault method. 123 | 124 | Args: 125 | key (str): The key to retrieve the value. 126 | default_value (Optional[Any], optional): The value to set if the key is not found. 127 | 128 | Returns: 129 | Any: The value associated with the key or the default value. 130 | """ 131 | formatted_key = self._format_key(key) 132 | formatted_value = self._format_value(default_value) 133 | 134 | # Todo bind both commands 135 | args, options = self._create_set_get_command(formatted_key, formatted_value) 136 | result = self.get_redis.execute_command(*args, **options) 137 | self._insertion_order_add(formatted_key) 138 | 139 | if result is None: 140 | return default_value 141 | 142 | return self._transform(result) 143 | 144 | def __len__(self) -> int: 145 | """ 146 | Get the number of items in the RedisDict, analogous to a dictionary. 147 | 148 | Returns: 149 | int: The number of items in the RedisDict. 150 | """ 151 | return self._insertion_order_len() 152 | 153 | def _scan_keys(self, search_term: str = '', full_scan: bool = False) -> Iterator[str]: 154 | return self._insertion_order_iter() 155 | 156 | def clear(self) -> None: 157 | """Remove all key-value pairs from the RedisDict in one batch operation using pipelining. 158 | 159 | This method mimics the behavior of the `clear` method from a standard Python dictionary. 160 | Redis pipelining is employed to group multiple commands into a single request, minimizing 161 | network round-trip time, latency, and I/O load, thereby enhancing the overall performance. 162 | 163 | """ 164 | with self.pipeline(): 165 | self._insertion_order_clear() 166 | for key in self._scan_keys(full_scan=True): 167 | self.redis.delete(key) 168 | 169 | def popitem(self) -> Tuple[str, Any]: 170 | """Remove and return a random (key, value) pair from the RedisDict as a tuple. 171 | 172 | This method is analogous to the `popitem` method of a standard Python dictionary. 173 | 174 | if dict_compliant set true stays true to In Python 3.7+, removes the last inserted item (LIFO order) 175 | 176 | Returns: 177 | tuple: A tuple containing a randomly chosen (key, value) pair. 178 | 179 | Raises: 180 | KeyError: If RedisDict is empty. 181 | """ 182 | key = self._insertion_order_latest() 183 | if key is None: 184 | raise KeyError("popitem(): dictionary is empty") 185 | return self._parse_key(key), self._transform(self._pop(key)) 186 | 187 | def _pop(self, formatted_key: str) -> Any: 188 | """ 189 | Remove the value associated with the given key and return it. 190 | 191 | Or return the default value if the key is not found. 192 | 193 | Args: 194 | formatted_key (str): The formatted key to remove the value. 195 | 196 | Returns: 197 | Any: The value associated with the key or the default value. 
198 | """ 199 | # TODO bind both commands 200 | self._insertion_order_delete(formatted_key) 201 | return self.get_redis.execute_command("GETDEL", formatted_key) 202 | 203 | def multi_get(self, _key: str) -> List[Any]: 204 | """ 205 | Not part of Python Redis Dict. 206 | 207 | Args: 208 | _key (str): Not used. 209 | 210 | Raises: 211 | NotImplementedError: Not part of Python Redis Dict. 212 | """ 213 | raise NotImplementedError("Not part of PythonRedisDict") 214 | 215 | def multi_chain_get(self, _keys: List[str]) -> List[Any]: 216 | """ 217 | Not part of Python Redis Dict. 218 | 219 | Args: 220 | _keys (List[str]): Not used. 221 | 222 | Raises: 223 | NotImplementedError: Not part of Python Redis Dict. 224 | """ 225 | raise NotImplementedError("Not part of PythonRedisDict") 226 | 227 | def multi_dict(self, _key: str) -> Dict[str, Any]: 228 | """ 229 | Not part of Python Redis Dict. 230 | 231 | Args: 232 | _key (str): Not used. 233 | 234 | Raises: 235 | NotImplementedError: Not part of Python Redis Dict. 236 | """ 237 | raise NotImplementedError("Not part of PythonRedisDict") 238 | 239 | def multi_del(self, _key: str) -> int: 240 | """ 241 | Not part of Python Redis Dict. 242 | 243 | Args: 244 | _key (str): Not used. 245 | 246 | Raises: 247 | NotImplementedError: Not part of Python Redis Dict. 248 | """ 249 | raise NotImplementedError("Not part of PythonRedisDict") 250 | 251 | def _insertion_order_add(self, formatted_key: str) -> bool: 252 | """Record a key's insertion into the dictionary. 253 | 254 | This private method updates the insertion order tracking when a new key is added 255 | to the dictionary. 256 | 257 | Args: 258 | formatted_key (str): The key being added to the dictionary. 259 | 260 | Returns: 261 | bool: True if the insertion order was updated, False otherwise. 262 | """ 263 | return bool(self.redis.zadd(self._insertion_order_key, {formatted_key: time.time()})) 264 | 265 | def _insertion_order_delete(self, formatted_key: str) -> bool: 266 | """Remove a key from the insertion order tracking. 267 | 268 | This private method updates the insertion order tracking when a key is removed 269 | from the dictionary. 270 | 271 | Args: 272 | formatted_key (str): The key being removed from the dictionary. 273 | 274 | Returns: 275 | bool: True if the insertion order was updated, False otherwise. 276 | """ 277 | return bool(self.redis.zrem(self._insertion_order_key, formatted_key)) 278 | 279 | def _insertion_order_iter(self) -> Iterator[str]: 280 | """Create an iterator for dictionary keys in their insertion order. 281 | 282 | This private method allows for iterating over the dictionary's keys in the order 283 | they were inserted. 284 | 285 | Yields: 286 | str: Keys in their insertion order. 287 | """ 288 | # TODO add full_scan boolean and search terms. 289 | first = True 290 | cursor = -1 291 | while cursor != 0: 292 | if first: 293 | cursor = 0 294 | first = False 295 | cursor, data = self.get_redis.zscan( 296 | name=self._insertion_order_key, 297 | cursor=cursor, 298 | count=1 299 | ) 300 | yield from (item[0] for item in data) 301 | 302 | def _insertion_order_clear(self) -> bool: 303 | """Clear all insertion order information. 304 | 305 | This private method resets the insertion order tracking for the dictionary. 306 | 307 | Returns: 308 | bool: True if the insertion order was successfully cleared, False otherwise. 
309 | """ 310 | return bool(self.redis.delete(self._insertion_order_key)) 311 | 312 | def _insertion_order_len(self) -> int: 313 | """Get the number of keys in the insertion order tracking. 314 | 315 | This private method returns the count of keys being tracked for insertion order. 316 | 317 | Returns: 318 | int: The number of keys in the insertion order tracking. 319 | """ 320 | return self.get_redis.zcard(self._insertion_order_key) 321 | 322 | def _insertion_order_latest(self) -> Union[str, None]: 323 | """Get the most recently inserted key in the dictionary. 324 | 325 | This private method retrieves the key that was most recently added to the dictionary. 326 | 327 | Returns: 328 | Union[str, None]: The most recently inserted key, or None if the dictionary is empty. 329 | """ 330 | result = self.redis.zrange(self._insertion_order_key, -1, -1) 331 | return result[0] if result else None 332 | -------------------------------------------------------------------------------- /tests/unit/tests_extend_json_types.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import json 3 | import unittest 4 | 5 | from typing import Any 6 | 7 | from collections import Counter, ChainMap 8 | from dataclasses import dataclass 9 | from ipaddress import IPv4Address, IPv6Address 10 | from pathlib import Path 11 | from queue import Queue, PriorityQueue 12 | from typing import NamedTuple 13 | from enum import Enum 14 | 15 | from datetime import datetime, date, time, timedelta 16 | from decimal import Decimal 17 | from collections import OrderedDict, defaultdict 18 | from uuid import UUID 19 | 20 | sys.path.append(str(Path(__file__).parent.parent.parent / "src")) 21 | from redis_dict.type_management import encode_json, decode_json, RedisDictJSONEncoder, RedisDictJSONDecoder 22 | 23 | 24 | class TestJsonEncoding(unittest.TestCase): 25 | def setUp(self): 26 | 27 | # Below are tests that contain types that handled by default json encoding/decoding 28 | self.skip_assert_raise_type_error_test = { 29 | "str", "int", "float", "dict", "list", 30 | "NoneType", "defaultdict", "OrderedDict", 31 | "bool", "str,int,bool in list", "None,float,list in list", 32 | "str,dict,set in list", 33 | } 34 | 35 | def _assert_value_encodes_decodes(self, value: Any) -> None: 36 | """Helper method to assert a value can be encoded and decoded correctly""" 37 | encoded = json.dumps(value, cls=RedisDictJSONEncoder) 38 | result = json.loads(encoded, cls=RedisDictJSONDecoder) 39 | self.assertEqual(value, result) 40 | 41 | def test_happy_path(self): 42 | test_cases = [ 43 | ("Hello World", "str"), 44 | (42, "int"), 45 | (3.14, "float"), 46 | (True, "bool"), 47 | (None, "NoneType"), 48 | 49 | ([1, 2, 3], "list"), 50 | ({"a": 1, "b": 2}, "dict"), 51 | #((1, 2, 3), "tuple"), 52 | ({1, 2, 3}, "set"), 53 | 54 | (datetime(2024, 1, 1, 12, 30, 45), "datetime"), 55 | (date(2024, 1, 1), "date"), 56 | (time(12, 30, 45), "time"), 57 | (timedelta(days=1, hours=2), "timedelta"), 58 | 59 | (Decimal("3.14159"), "Decimal"), 60 | (complex(1, 2), "complex"), 61 | (bytes([72, 101, 108, 108, 111]), "bytes"), 62 | (UUID('12345678-1234-5678-1234-567812345678'), "UUID"), 63 | 64 | (OrderedDict([('a', 1), ('b', 2)]), "OrderedDict"), 65 | (defaultdict(type(None), {'a': 1, 'b': 2}), "defaultdict"), 66 | (frozenset([1, 2, 3]), "frozenset"), 67 | ] 68 | 69 | for test_case_input, test_case_title in test_cases: 70 | with self.subTest(f"Testing happy mixed list path: {test_case_title}"): 71 | 
self._assert_value_encodes_decodes(test_case_input) 72 | 73 | if test_case_title not in self.skip_assert_raise_type_error_test: 74 | with self.assertRaises(TypeError): 75 | json.loads(json.dumps(test_case_input)) 76 | 77 | def test_empty_path(self): 78 | test_cases = [ 79 | ("", "str"), 80 | (0, "int"), 81 | (0.0, "float"), 82 | (False, "bool"), 83 | (None, "NoneType"), 84 | 85 | ([], "list"), 86 | ({}, "dict"), 87 | # ((), "tuple"), TODO Handle tuple 88 | (set(), "set"), 89 | 90 | (datetime.min, "datetime"), 91 | (date.min, "date"), 92 | (time.min, "time"), 93 | (timedelta(), "timedelta"), 94 | 95 | (Decimal("0"), "Decimal"), 96 | (complex(0, 0), "complex"), 97 | (bytes(), "bytes"), 98 | (UUID('00000000-0000-0000-0000-000000000000'), "UUID"), 99 | 100 | (OrderedDict(), "OrderedDict"), 101 | (defaultdict(type(None)), "defaultdict"), 102 | (frozenset(), "frozenset"), 103 | ] 104 | 105 | for test_case_input, test_case_title in test_cases: 106 | with self.subTest(f"Testing happy mixed list path: {test_case_title}"): 107 | self._assert_value_encodes_decodes(test_case_input) 108 | 109 | if test_case_title not in self.skip_assert_raise_type_error_test: 110 | with self.assertRaises(TypeError): 111 | json.loads(json.dumps(test_case_input)) 112 | 113 | def test_happy_nested_dict(self): 114 | test_cases = [ 115 | ({"value": "Hello World"}, "str"), 116 | ({"value": 42}, "int"), 117 | ({"value": 3.14}, "float"), 118 | ({"value": True}, "bool"), 119 | ({"value": None}, "NoneType"), 120 | 121 | ({"value": [1, 2, 3]}, "list"), 122 | ({"value": {"a": 1, "b": 2}}, "dict"), 123 | # ({"value": (1, 2, 3)}, "tuple"), TODO Handle tuple 124 | ({"value": {1, 2, 3}}, "set"), 125 | 126 | ({"value": datetime(2024, 1, 1, 12, 30, 45)}, "datetime"), 127 | ({"value": date(2024, 1, 1)}, "date"), 128 | ({"value": time(12, 30, 45)}, "time"), 129 | ({"value": timedelta(days=1, hours=2)}, "timedelta"), 130 | 131 | ({"value": Decimal("3.14159")}, "Decimal"), 132 | ({"value": complex(1, 2)}, "complex"), 133 | ({"value": bytes([72, 101, 108, 108, 111])}, "bytes"), 134 | ({"value": UUID('12345678-1234-5678-1234-567812345678')}, "UUID"), 135 | 136 | ({"value": OrderedDict([('a', 1), ('b', 2)])}, "OrderedDict"), 137 | ({"value": defaultdict(type(None), {'a': 1, 'b': 2})}, "defaultdict"), 138 | ({"value": frozenset([1, 2, 3])}, "frozenset"), 139 | ] 140 | 141 | for test_case_input, test_case_title in test_cases: 142 | with self.subTest(f"Testing happy mixed list path: {test_case_title}"): 143 | self._assert_value_encodes_decodes(test_case_input) 144 | 145 | if test_case_title not in self.skip_assert_raise_type_error_test: 146 | with self.assertRaises(TypeError): 147 | json.loads(json.dumps(test_case_input)) 148 | 149 | def test_happy_nested_dict_two_levels(self): 150 | test_cases = [ 151 | ({"level1": {"value": "Hello World"}}, "str"), 152 | ({"level1": {"value": 42}}, "int"), 153 | ({"level1": {"value": 3.14}}, "float"), 154 | ({"level1": {"value": True}}, "bool"), 155 | ({"level1": {"value": None}}, "NoneType"), 156 | 157 | ({"level1": {"value": [1, 2, 3]}}, "list"), 158 | ({"level1": {"value": {"a": 1, "b": 2}}}, "dict"), 159 | # ({"level1": {"value": (1, 2, 3)}}, "tuple"), TODO Handle tuple 160 | ({"level1": {"value": {1, 2, 3}}}, "set"), 161 | 162 | ({"level1": {"value": datetime(2024, 1, 1, 12, 30, 45)}}, "datetime"), 163 | ({"level1": {"value": date(2024, 1, 1)}}, "date"), 164 | ({"level1": {"value": time(12, 30, 45)}}, "time"), 165 | ({"level1": {"value": timedelta(days=1, hours=2)}}, "timedelta"), 166 | 167 | ({"level1": 
{"value": Decimal("3.14159")}}, "Decimal"), 168 | ({"level1": {"value": complex(1, 2)}}, "complex"), 169 | ({"level1": {"value": bytes([72, 101, 108, 108, 111])}}, "bytes"), 170 | ({"level1": {"value": UUID('12345678-1234-5678-1234-567812345678')}}, "UUID"), 171 | 172 | ({"level1": {"value": OrderedDict([('a', 1), ('b', 2)])}}, "OrderedDict"), 173 | ({"level1": {"value": defaultdict(type(None), {'a': 1, 'b': 2})}}, "defaultdict"), 174 | ({"level1": {"value": frozenset([1, 2, 3])}}, "frozenset"), 175 | ] 176 | 177 | for test_case_input, test_case_title in test_cases: 178 | with self.subTest(f"Testing happy mixed list path: {test_case_title}"): 179 | self._assert_value_encodes_decodes(test_case_input) 180 | 181 | if test_case_title not in self.skip_assert_raise_type_error_test: 182 | with self.assertRaises(TypeError): 183 | json.loads(json.dumps(test_case_input)) 184 | 185 | def test_happy_list(self): 186 | test_cases = [ 187 | (["Hello World"], "str"), 188 | ([42], "int"), 189 | ([3.14], "float"), 190 | ([True], "bool"), 191 | ([None], "NoneType"), 192 | 193 | ([[1, 2, 3]], "list"), 194 | ([{"a": 1, "b": 2}], "dict"), 195 | # ([(1, 2, 3)], "tuple"), TODO Handle tuple 196 | ([{1, 2, 3}], "set"), 197 | 198 | ([datetime(2024, 1, 1, 12, 30, 45)], "datetime"), 199 | ([date(2024, 1, 1)], "date"), 200 | ([time(12, 30, 45)], "time"), 201 | ([timedelta(days=1, hours=2)], "timedelta"), 202 | 203 | ([Decimal("3.14159")], "Decimal"), 204 | ([complex(1, 2)], "complex"), 205 | ([bytes([72, 101, 108, 108, 111])], "bytes"), 206 | ([UUID('12345678-1234-5678-1234-567812345678')], "UUID"), 207 | 208 | ([OrderedDict([('a', 1), ('b', 2)])], "OrderedDict"), 209 | ([defaultdict(type(None), {'a': 1, 'b': 2})], "defaultdict"), 210 | ([frozenset([1, 2, 3])], "frozenset"), 211 | ] 212 | 213 | for test_case_input, test_case_title in test_cases: 214 | with self.subTest(f"Testing happy mixed list path: {test_case_title}"): 215 | self._assert_value_encodes_decodes(test_case_input) 216 | 217 | if test_case_title not in self.skip_assert_raise_type_error_test: 218 | with self.assertRaises(TypeError): 219 | json.loads(json.dumps(test_case_input)) 220 | 221 | def test_happy_mixed_list(self): 222 | test_cases = [ 223 | (["Hello World", 42, True], "str,int,bool in list"), 224 | ([None, 3.14, [1, 2, 3]], "None,float,list in list"), 225 | (["test", {"a": 1}, {1, 2, 3}], "str,dict,set in list"), 226 | 227 | ([datetime(2024, 1, 1), date(2024, 1, 1), time(12, 30, 45)], "datetime,date,time in list"), 228 | ([timedelta(days=1), Decimal("3.14159"), complex(1, 2)], "timedelta,Decimal,complex in list"), 229 | ([bytes([72, 101]), UUID('12345678-1234-5678-1234-567812345678'), "test"], "bytes,UUID,str in list"), 230 | 231 | ([OrderedDict([('a', 1)]), defaultdict(type(None), {'b': 2}), frozenset([1, 2])], 232 | "OrderedDict,defaultdict,frozenset in list"), 233 | (["a", 1, Decimal("3.14159")], "str,int,Decimal in list"), 234 | ([True, None, datetime(2024, 1, 1)], "bool,None,datetime in list"), 235 | ] 236 | 237 | for test_case_input, test_case_title in test_cases: 238 | with self.subTest(f"Testing happy mixed list path: {test_case_title}"): 239 | self._assert_value_encodes_decodes(test_case_input) 240 | 241 | if test_case_title not in self.skip_assert_raise_type_error_test: 242 | with self.assertRaises(TypeError): 243 | json.loads(json.dumps(test_case_input)) 244 | 245 | def test_happy_list_dicts_mixed(self): 246 | test_cases = [ 247 | ( 248 | [ 249 | {"decimal": Decimal("3.14159"), "bytes": bytes([72, 101, 108, 108, 111])}, 250 | {"complex": 
complex(1, 2), "uuid": UUID('12345678-1234-5678-1234-567812345678')}, 251 | {"date": datetime(2024, 1, 1), "set": {1, 2, 3}} 252 | ], 253 | "list of dicts with decimal/bytes, complex/uuid, datetime/set" 254 | ), ( 255 | [ 256 | {"ordered": OrderedDict([('a', 1)]), "default": defaultdict(type(None), {'x': 1})}, 257 | {"frozen": frozenset([1, 2, 3]), "time": time(12, 30, 45)}, 258 | {"delta": timedelta(days=1), "nested": {"a": 1, "b": 2}} 259 | ], 260 | "list of dicts with OrderedDict/defaultdict, frozenset/time, timedelta/dict" 261 | ), ( 262 | [ 263 | {"bytes": bytes([65, 66, 67]), "decimal": Decimal("10.5"), "date": date(2024, 1, 1)}, 264 | {"uuid": UUID('12345678-1234-5678-1234-567812345678'), "complex": complex(3, 4), "set": {4, 5, 6}}, 265 | {"time": time(1, 2, 3), "delta": timedelta(hours=5), "list": [1, 2, 3]} 266 | ], 267 | "list of dicts with three mixed types each" 268 | ), 269 | ] 270 | 271 | for test_case_input, test_case_title in test_cases: 272 | with self.subTest(f"Testing happy list of dicts mixed path: {test_case_title}"): 273 | self._assert_value_encodes_decodes(test_case_input) 274 | 275 | encoded = encode_json(test_case_input) 276 | result = decode_json(encoded) 277 | 278 | for test_index, expected_test in enumerate(test_case_input): 279 | for index, key in enumerate(expected_test): 280 | expected_value = test_case_input[test_index][key] 281 | result_value = result[test_index][key] 282 | self.assertEqual(expected_value, result_value) 283 | 284 | # Ordered dict, becomes regular dictionary. Since the idea is to extend json types fixing this 285 | # issue is not within the scope of this feature 286 | if test_case_title == "list of dicts with OrderedDict/defaultdict, frozenset/time, timedelta/dict": 287 | continue 288 | self.assertEqual(type(expected_value), type(result_value)) 289 | 290 | def test_potential_candidates(self): 291 | """Test cases for types that could be added encoding/decoding in the future""" 292 | 293 | @dataclass 294 | class DataClassType: 295 | name: str 296 | value: int 297 | 298 | class NamedTupleType(NamedTuple): 299 | name: str 300 | value: int 301 | 302 | class EnumType_(Enum): 303 | ONE = 1 304 | TWO = 2 305 | 306 | potential_candidates = [ 307 | # (Counter(['a', 'b', 'a']), "Counter"), Encodes into other type 308 | (ChainMap({'a': 1}, {'b': 2}), "ChainMap"), 309 | (Queue(), "Queue"), 310 | (PriorityQueue(), "PriorityQueue"), 311 | 312 | (IPv4Address('192.168.1.1'), "IPv4Address"), 313 | (IPv6Address('2001:db8::1'), "IPv6Address"), 314 | 315 | (Path('/foo/bar.txt'), "Path"), 316 | 317 | (DataClassType("test", 42), "DataClass"), 318 | #(NamedTupleType("test", 42), "NamedTuple"), Encodes into other type 319 | (EnumType_.ONE, "Enum"), 320 | 321 | #(re_compile(r'\d+'), "Pattern"), 322 | #(memoryview(b'Hello'), "memoryview"), 323 | ] 324 | 325 | for test_case_input, test_case_title in potential_candidates: 326 | with self.subTest(f"Testing potential candidate: {test_case_title}"): 327 | 328 | # fails with json out of the box 329 | with self.assertRaises(TypeError, msg=test_case_title): 330 | result = json.dumps(test_case_input) 331 | print(result) 332 | 333 | # Since these types are not yet added 334 | with self.assertRaises(TypeError, msg=test_case_title): 335 | result = json.dumps(test_case_input, cls=RedisDictJSONEncoder) 336 | print(result) 337 | 338 | if __name__ == '__main__': 339 | unittest.main() -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # Redis-dict 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/redis-dict.svg)](https://pypi.org/project/redis-dict/) 4 | [![CI](https://github.com/Attumm/redis-dict/actions/workflows/ci.yml/badge.svg)](https://github.com/Attumm/redis-dict/actions/workflows/ci.yml) 5 | [![codecov](https://codecov.io/gh/Attumm/redis-dict/graph/badge.svg?token=Lqs7McQGEs)](https://codecov.io/gh/Attumm/redis-dict) 6 | [![Documentation](https://img.shields.io/badge/docs-sphinx-blue.svg)](https://attumm.github.io/redis-dict/) 7 | [![Supports Valkey](https://github.com/Attumm/redis-dict/actions/workflows/valkey_support.yml/badge.svg)](https://github.com/Attumm/redis-dict/actions/workflows/valkey_support.yml) 8 | [![Downloads](https://static.pepy.tech/badge/redis-dict/month)](https://pepy.tech/project/redis-dict) 9 | 10 | RedisDict is a Python library that offers a convenient and familiar interface for interacting with Redis, treating it as if it were a Python dictionary. Its goal is to help developers write clean, Pythonic code while using Redis as a storage solution for seamless distributed computing. RedisDict utilizes Redis as a key-value store and supports various data types, including strings, integers, floats, booleans, lists, and dictionaries. Additionally, developers can extend RedisDict to work with custom objects. 11 | 12 | The library includes utility functions for more complex use cases such as caching, batching, and more. By leveraging Redis for efficient key-value storage, RedisDict enables high-performance data management, maintaining efficiency even with large datasets and Redis instances. 13 | 14 | 15 | [Usage](#Usage) | [Types](#Types) | [Expiration](#Expiration) | [Batching](#Batching) | [Custom Types](#Custom-Types) | [Security](#Security) 16 | 17 | --- 18 | 19 | ## Features 20 | 21 | * Dictionary-like interface: Use familiar Python dictionary syntax to interact with Redis. 22 | * Data Type Support: Comprehensive support for various data types. 23 | * Pipelining support: Use pipelines for batch operations to improve performance. 24 | * Expiration Support: Enables the setting of expiration times either globally or individually per key, through the use of context managers. 25 | * Efficiency and Scalability: RedisDict is designed for use with large datasets and is optimized for efficiency. It retrieves only the data needed for a particular operation, ensuring efficient memory usage and fast performance. 26 | * Namespace Management: Provides simple and efficient namespace handling to help organize and manage data in Redis, streamlining data access and manipulation. 27 | * Distributed Computing: With its ability to seamlessly connect to other instances or servers with access to the same Redis instance, RedisDict enables easy distributed computing. 28 | * Custom data types: Add custom type encoding/decoding to store your own data types. 29 | * Encryption: Allows for storing data encrypted, while retaining the simple dictionary interface. 30 | 31 | ## Usage 32 | 33 | ```bash 34 | pip install redis-dict 35 | ``` 36 | 37 | ```python 38 | >>> from redis_dict import RedisDict 39 | >>> dic = RedisDict() 40 | >>> dic['foo'] = 42 41 | >>> dic['foo'] 42 | 42 43 | >>> 'foo' in dic 44 | True 45 | >>> dic["baz"] = "hello world" 46 | >>> dic 47 | {'foo': 42, 'baz': 'hello world'} 48 | >>> from datetime import datetime 49 | >>> dic["datetime"] = datetime.now() 50 | ``` 51 | In Redis, our example looks like this:
52 | ``` 53 | 127.0.0.1:6379> KEYS "*" 54 | 1) "main:foo" 55 | 2) "main:baz" 56 | 127.0.0.1:6379> GET "main:foo" 57 | "int:42" 58 | 127.0.0.1:6379> GET "main:baz" 59 | "str:hello world" 60 | 127.0.0.1:6379> GET "main:datetime" 61 | "datetime:2025-02-20T19:37:54.214274" 62 | ``` 63 | 64 | ## Types 65 | 66 | ### Standard types 67 | RedisDict supports a range of Python data types, from basic types to nested structures. 68 | Basic types are handled natively, while for complex data types like lists and dictionaries, RedisDict uses JSON serialization, specifically avoiding [pickle](https://docs.python.org/3/library/pickle.html) due to its security vulnerabilities within distributed computing contexts. 69 | Although the library supports nested structures, the recommended best practice is to use RedisDict as a shallow dictionary. 70 | This approach optimizes Redis database performance and efficiency by ensuring that each set and get operation efficiently maps to Redis's key-value storage capabilities, while still preserving the library's Pythonic interface. 71 | The following types are supported: 72 | `str, int, float, bool, NoneType, list, dict, tuple, set, datetime, date, time, timedelta, Decimal, complex, bytes, UUID, OrderedDict, defaultdict, frozenset` 73 | ```python 74 | from redis_dict import RedisDict 75 | 76 | from uuid import UUID 77 | from decimal import Decimal 78 | from collections import OrderedDict, defaultdict 79 | from datetime import datetime, date, time, timedelta 80 | 81 | dic = RedisDict() 82 | 83 | dic["string"] = "Hello World" 84 | dic["number"] = 42 85 | dic["float"] = 3.14 86 | dic["bool"] = True 87 | dic["None"] = None 88 | 89 | dic["list"] = [1, 2, 3] 90 | dic["dict"] = {"a": 1, "b": 2} 91 | dic["tuple"] = (1, 2, 3) 92 | dic["set"] = {1, 2, 3} 93 | 94 | dic["datetime"] = datetime(2024, 1, 1, 12, 30, 45) 95 | dic["date"] = date(2024, 1, 1) 96 | dic["time"] = time(12, 30, 45) 97 | dic["delta"] = timedelta(days=1, hours=2) 98 | 99 | dic["decimal"] = Decimal("3.14159") 100 | dic["complex"] = complex(1, 2) 101 | dic["bytes"] = bytes([72, 101, 108, 108, 111]) 102 | dic["uuid"] = UUID('12345678-1234-5678-1234-567812345678') 103 | 104 | dic["ordered"] = OrderedDict([('a', 1), ('b', 2)]) 105 | dic["default"] = defaultdict(int, {'a': 1, 'b': 2}) 106 | dic["frozen"] = frozenset([1, 2, 3]) 107 | ``` 108 | 109 | ### Namespaces 110 | Acting as an identifier for your dictionary across different systems, RedisDict employs namespaces for organized data management. When a namespace isn't specified, "main" becomes the default. This allows for data organization across systems and projects that share the same Redis instance. 111 | This approach also minimizes the risk of key collisions between different applications, preventing hard-to-debug issues. By leveraging namespaces, RedisDict ensures a cleaner and more maintainable data management experience for developers working on multiple projects. 112 | 113 | ## Advanced Features 114 | 115 | ### Expiration 116 | 117 | Redis provides a valuable feature that enables keys to expire. RedisDict supports this feature in the following ways: 118 | 1. Set a default expiration time when creating a RedisDict instance. In this example, the keys will have a default expiration time of 10 seconds. Use an integer number of seconds or pass a datetime timedelta. 119 | 120 | ```python 121 | dic = RedisDict(expire=10) 122 | dic['gone'] = 'in ten seconds' 123 | ``` 124 | Or, for a more Pythonic approach, use a timedelta.
125 | ```python 126 | from datetime import timedelta 127 | 128 | dic = RedisDict(expire=timedelta(minutes=1)) 129 | dic['gone'] = 'in a minute' 130 | ``` 131 | 132 | 2. Temporarily set the default expiration time within the scope using a context manager. In this example, the key 'gone' will expire after 60 seconds. The default expiration time for other keys outside the context manager remains unchanged. Either pass an integer or a timedelta. 133 | 134 | ```python 135 | dic = RedisDict() 136 | 137 | seconds = 60 138 | with dic.expire_at(seconds): 139 | dic['gone'] = 'in sixty seconds' 140 | ``` 141 | 142 | 3. Updating keys while preserving the initial timeout In certain situations, there is a need to update the value while keeping the expiration intact. This is achievable by setting the 'preserve_expiration' to true. 143 | 144 | ```python 145 | import time 146 | 147 | dic = RedisDict(expire=10, preserve_expiration=True) 148 | dic['gone'] = 'in ten seconds' 149 | 150 | time.sleep(5) 151 | dic['gone'] = 'gone in 5 seconds' 152 | 153 | ``` 154 | 155 | ### Batching 156 | Efficiently batch your requests using the Pipeline feature, which can be easily utilized with a context manager. 157 | 158 | ```python 159 | dic = RedisDict(namespace="example") 160 | 161 | # one round trip to redis 162 | with dic.pipeline(): 163 | for index in range(100): 164 | dic[str(index)] = index 165 | ``` 166 | 167 | ### Distributed computing 168 | You can use RedisDict for distributed computing by starting multiple RedisDict instances on different servers or instances that have access to the same Redis instance: 169 | ```python 170 | # On server 1 171 | from redis_dict import RedisDict 172 | 173 | dic = RedisDict(namespace="example") 174 | dic["foo"] = "bar" 175 | 176 | # On server 2 177 | from redis_dict import RedisDict 178 | 179 | dic = RedisDict(namespace="example") 180 | print(dic["foo"]) # outputs "bar" 181 | ``` 182 | 183 | ## Additional Examples 184 | 185 | ### Caching made simple 186 | ```python 187 | import time 188 | from datetime import timedelta 189 | from redis_dict import RedisDict 190 | 191 | def expensive_function(x): 192 | time.sleep(x) 193 | return x * 2 194 | 195 | cache = RedisDict(namespace="cache", expire=timedelta(minutes=60)) 196 | 197 | def cached_expensive_function(x): 198 | if x not in cache: 199 | cache[x] = expensive_function(x) 200 | return cache[x] 201 | 202 | start_time = time.time() 203 | print(cached_expensive_function(5)) # Takes around 5 seconds to compute and caches the result. 204 | print(f"Time taken: {time.time() - start_time:.2f} seconds") 205 | 206 | start_time = time.time() 207 | print(cached_expensive_function(5)) # Fetches the result from the cache, taking almost no time. 
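# Note: since the cache lives in Redis, any other process using the same "cache" namespace also gets this cached result.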
208 | print(f"Time taken: {time.time() - start_time:.2f} seconds") 209 | ``` 210 | 211 | ### Redis-dict as dictionary 212 | ```python 213 | from redis_dict import RedisDict 214 | 215 | # Create a RedisDict instance with a namespace 216 | dic = RedisDict(namespace="example") 217 | 218 | # Set key-value pairs 219 | dic["name"] = "John Doe" 220 | dic["age"] = 32 221 | dic["city"] = "Amsterdam" 222 | 223 | # Get value by key, from any instance connected to the same redis/namespace 224 | print(dic["name"]) # Output: John Doe 225 | 226 | # Update value by key, got a year older 227 | dic["age"] = 33 228 | 229 | # Check if key exists 230 | print("name" in dic) # Output: True 231 | print("country" in dic) # Output: False 232 | 233 | # Get value with a default value if the key doesn't exist 234 | print(dic.get("country", "NL")) # Output: NL 235 | 236 | # Get the length (number of keys) of the RedisDict 237 | print(len(dic)) # Output: 3 238 | 239 | # Iterate over keys 240 | for key in dic: 241 | print(key, dic[key]) 242 | 243 | # Delete a key-value pair 244 | del dic["city"] 245 | 246 | # Clear all key-value pairs in the RedisDict 247 | dic.clear() 248 | 249 | # Get the length (number of keys) of the RedisDict 250 | print(len(dic)) # Output: 0 251 | 252 | # Update RedisDict with multiple key-value pairs 253 | dic.update({"a": 1, "b": 2, "c": 3}) 254 | 255 | # Use methods of a normal dict 256 | print(list(dic.keys())) # Output: ['a', 'b', 'c'] 257 | print(list(dic.values())) # Output: [1, 2, 3] 258 | print(list(dic.items())) # Output: [('a', 1), ('b', 2), ('c', 3)] 259 | 260 | # Using pop() and popitem() methods 261 | value = dic.pop("a") 262 | print(value) # Output: 1 263 | 264 | key, value = dic.popitem() 265 | print(key, value) # Output: 'c' 3 (example) 266 | 267 | # Using setdefault() method 268 | dic.setdefault("d", 4) 269 | print(dic["d"]) # Output: 4 270 | 271 | from datetime import datetime, timedelta 272 | 273 | # Redis dict supports datetime 274 | dic["now"] = datetime.now() 275 | print(dic["now"]) # 2025-02-20 19:25:38.835816 276 | 277 | # Redis dict supports timedelta and more types 278 | dic["time"] = timedelta(days=1) 279 | print(dic["time"]) # 1 day, 0:00:00 280 | 281 | print(dic) 282 | # {'now': datetime.datetime(2025, 2, 20, 19, 25, 38, 835816), 'time': datetime.timedelta(days=1), 'b': 2, 'd': 4} 283 | ``` 284 | 285 | ### Additional Examples 286 | For more advanced examples of RedisDict, please refer to the unit-test files in the repository. All features and functionalities are thoroughly tested in the [unit tests (here)](https://github.com/Attumm/redis-dict/blob/main/tests/unit/tests.py#L1), or take a look at the batching [load test](https://github.com/Attumm/redis-dict/blob/main/tests/load/tests_load.py#L1). 287 | The unit tests can be used as a starting point. 288 | 289 | ### Nested types 290 | RedisDict supports nested structures with mixed types through JSON serialization. The feature works by utilizing JSON encoding and decoding under the hood. While this represents an upgrade in functionality, the feature is not fully implemented and should be used with caution. For optimal performance, using shallow dictionaries is recommended. 291 | ```python 292 | from datetime import datetime, timedelta 293 | 294 | dic["mixed"] = [1, "foobar", 3.14, [1, 2, 3], datetime.now()] 295 | 296 | dic['dic'] = {"elapsed_time": timedelta(hours=60)} 297 | ``` 298 | 299 | ### JSON Encoding - Decoding 300 | The nested type support in RedisDict is implemented using custom JSON encoders and decoders.
These JSON encoders and decoders are built on top of RedisDict's own encoding and decoding functionality, extending it for JSON compatibility. Since JSON serialization was a frequently requested feature, these enhanced encoders and decoders are available for use in other projects: 301 | ```python 302 | import json 303 | from datetime import datetime 304 | from redis_dict import RedisDictJSONDecoder, RedisDictJSONEncoder 305 | 306 | data = [1, "foobar", 3.14, [1, 2, 3], datetime.now()] 307 | encoded = json.dumps(data, cls=RedisDictJSONEncoder) 308 | result = json.loads(encoded, cls=RedisDictJSONDecoder) 309 | ``` 310 | 311 | ## Custom Types 312 | ### Extending RedisDict with Custom Types 313 | 314 | RedisDict supports custom type serialization. Here's how to add a new type: 315 | 316 | ```python 317 | import json 318 | from redis_dict import RedisDict 319 | class Person: 320 | def __init__(self, name, age): 321 | self.name = name 322 | self.age = age 323 | 324 | def encode(self) -> str: 325 | return json.dumps(self.__dict__) 326 | 327 | @classmethod 328 | def decode(cls, encoded_str: str) -> 'Person': 329 | return cls(**json.loads(encoded_str)) 330 | 331 | redis_dict = RedisDict() 332 | 333 | # Extend redis dict with the new type 334 | redis_dict.extends_type(Person) 335 | 336 | # RedisDict can now seamlessly handle Person instances. 337 | person = Person(name="John", age=32) 338 | redis_dict["person1"] = person 339 | 340 | result = redis_dict["person1"] 341 | 342 | assert result.name == person.name 343 | assert result.age == person.age 344 | ``` 345 | 346 | ### Insertion Order 347 | For insertion order, use the PythonRedisDict. This class is focused on Python dictionary behavior one-to-one. 348 | It will eventually become a drop-in replacement for a dictionary. Currently, nested types and typed keys are not yet supported but will be added in the future. 349 | 350 | ```python 351 | from redis_dict import PythonRedisDict 352 | 353 | dic = PythonRedisDict() 354 | dic["1"] = "one" 355 | dic["2"] = "two" 356 | dic["3"] = "three" 357 | 358 | assert list(dic.keys()) == ["1", "2", "3"] 359 | ``` 360 | 361 | For more information, see the tests on [extending types](https://github.com/Attumm/redis-dict/blob/main/tests/unit/tests_extend_types.py). 362 | 363 | ## Security 364 | 365 | Security is an important aspect of production projects. Redis-dict was developed within a strict compliance environment. 366 | Best practice in Redis is to use passwords and network encryption through TLS. However, Redis-dict offers an additional feature through extended types. 367 | It is possible to store the values of keys encrypted. The values are encrypted with AES GCM, which is currently considered best practice for security. 368 | 369 | ### Storage Encryption 370 | Storing values encrypted can be achieved by using extended types; more documentation on that will follow. 371 | For now, a code example is available in this test file: [encrypted test](https://github.com/Attumm/redis-dict/blob/main/tests/unit/tests_encrypt.py). 372 | 373 | ### Encryption Network 374 | Setup guide for configuring and utilizing encrypted Redis TLS for redis-dict. 375 | [Setup guide](https://github.com/Attumm/redis-dict/blob/main/docs/tutorials/encrypted_redis.MD) 376 | 377 | ### Tests 378 | The RedisDict library includes a comprehensive suite of tests that ensure its correctness and resilience. The test suite covers various data types, edge cases, and error handling scenarios.
It also employs the Hypothesis library for property-based testing, which provides fuzz testing to evaluate the implementation 379 | 380 | ### Redis config 381 | To configure RedisDict using your Redis config. 382 | 383 | Configure both the host and port. Or configuration with a setting dictionary. 384 | ```python 385 | dic = RedisDict(host='127.0.0.1', port=6380) 386 | 387 | redis_config = { 388 | 'host': '127.0.0.1', 389 | 'port': 6380, 390 | } 391 | 392 | config_dic = RedisDict(**redis_config) 393 | ``` 394 | 395 | ## Installation 396 | ```sh 397 | pip install redis-dict 398 | ``` 399 | 400 | ### Note 401 | * Please be aware that this project is currently being utilized by various organizations in their production environments. If you have any questions or concerns, feel free to raise issues 402 | * This project only uses redis as dependency 403 | -------------------------------------------------------------------------------- /tests/unit/tests_encrypt.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import base64 4 | import unittest 5 | 6 | from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes 7 | from cryptography.hazmat.backends import default_backend 8 | 9 | from redis_dict import RedisDict 10 | 11 | 12 | class EncryptedStringClassBased(str): 13 | """A class that behaves like a string but enables encrypted storage in Redis dictionaries. 14 | 15 | This class inherits from the built-in `str` class, providing all standard string 16 | functionality. However, when stored in a RedisDict, it is automatically 17 | encrypted. This allows for transparent encryption of sensitive data in Redis 18 | without changing how the string is used in your Python code. 19 | 20 | The class uses AES encryption with Galois/Counter Mode (GCM) for secure 21 | encryption and decryption. 22 | 23 | Attributes: 24 | iv (bytes): The initialization vector, retrieved from an environment variable. 25 | key (bytes): The encryption key, retrieved from an environment variable. 26 | """ 27 | 28 | def __init__(self, value: str): 29 | """Initializes an EncryptedStringClassBased object. 30 | 31 | Retrieves the initialization vector (IV) and encryption key from 32 | environment variables 'ENCRYPTION_IV' and 'ENCRYPTION_KEY' respectively, 33 | decoding them from Base64. 34 | 35 | Args: 36 | value (str): The string value to be encapsulated and potentially encrypted. 37 | """ 38 | self.value = value 39 | self.iv = base64.b64decode(os.environ['ENCRYPTION_IV']) 40 | self.key = base64.b64decode(os.environ['ENCRYPTION_KEY']) 41 | 42 | def __str__(self): 43 | """Returns the decrypted string value. 44 | 45 | This method is called when the object is used in a string context, 46 | returning the original string value. 47 | 48 | Returns: 49 | str: The original string value. 50 | """ 51 | return self.value 52 | 53 | def __repr__(self): 54 | """Returns a string representation of the EncryptedStringClassBased object. 55 | 56 | This method provides a string that, when evaluated, would recreate the object. 57 | 58 | Returns: 59 | str: A string representation of the object, in the format 60 | "EncryptedStringClassBased('value')". 61 | """ 62 | return f"EncryptedStringClassBased('{self.value}')" 63 | 64 | def encode(self) -> str: 65 | """Encrypts the string value using AES-GCM. 66 | 67 | Retrieves the encryption key and initialization vector (IV) from instance attributes 68 | (which are loaded from environment variables during initialization). 
69 | Generates a random nonce for each encryption operation. 70 | Uses AES in GCM mode to encrypt the string value. 71 | Combines the nonce, IV, GCM tag, and ciphertext, then Base64 encodes the result. 72 | 73 | Returns: 74 | str: Base64 encoded string containing the nonce, IV, GCM tag, and ciphertext. 75 | This encoded string represents the encrypted value. 76 | """ 77 | key = self.key or base64.b64decode(os.environ['ENCRYPTION_KEY']) 78 | iv = self.iv or base64.b64decode(os.environ['ENCRYPTION_IV']) 79 | 80 | nonce = os.urandom(16) 81 | 82 | cipher = Cipher(algorithms.AES(key), modes.GCM(nonce), backend=default_backend()) 83 | encryptor = cipher.encryptor() 84 | 85 | encrypted_data = encryptor.update(self.value.encode('utf-8', errors='surrogatepass')) + encryptor.finalize() 86 | 87 | combined_data = nonce + iv + encryptor.tag + encrypted_data 88 | return str(base64.b64encode(combined_data).decode('utf-8')) 89 | 90 | @classmethod 91 | def decode(cls, encrypted_value: str) -> 'EncryptedStringClassBased': 92 | """Decrypts an encrypted string value. 93 | 94 | Decodes the Base64 encoded encrypted string. 95 | Extracts the nonce, IV, GCM tag, and ciphertext from the decoded bytes. 96 | Retrieves the encryption key and initialization vector (IV) from environment variables. 97 | Uses AES in GCM mode with the extracted nonce and tag to decrypt the ciphertext. 98 | 99 | Args: 100 | encrypted_value (str): Base64 encoded string representing the encrypted value. 101 | 102 | Returns: 103 | EncryptedStringClassBased: A new `EncryptedStringClassBased` object containing 104 | the decrypted string value. 105 | """ 106 | key = base64.b64decode(os.environ['ENCRYPTION_KEY']) 107 | iv = base64.b64decode(os.environ['ENCRYPTION_IV']) 108 | 109 | encrypted_data_bytes = base64.b64decode(encrypted_value) 110 | 111 | nonce_from_storage = encrypted_data_bytes[:16] 112 | iv_from_storage = encrypted_data_bytes[16:16 + len(iv)] 113 | tag = encrypted_data_bytes[16 + len(iv):16 + len(iv) + 16] 114 | ciphertext = encrypted_data_bytes[16 + len(iv) + 16:] 115 | 116 | cipher = Cipher(algorithms.AES(key), modes.GCM(nonce_from_storage, tag), 117 | backend=default_backend()) 118 | decryptor = cipher.decryptor() 119 | 120 | decrypted_data = decryptor.update(ciphertext) + decryptor.finalize() 121 | return cls(decrypted_data.decode('utf-8', errors='surrogatepass')) 122 | 123 | 124 | class TestRedisDictEncryptionClassBased(unittest.TestCase): 125 | @classmethod 126 | def setUpClass(cls): 127 | iv = b"0123456789abcdef" # 16 bytes 128 | key = b"0123456789abcdef0123456789abcdef" # 32 bytes (256-bit key) 129 | 130 | # Set test environment variables 131 | os.environ['ENCRYPTION_IV'] = base64.b64encode(iv).decode('utf-8') 132 | os.environ['ENCRYPTION_KEY'] = base64.b64encode(key).decode('utf-8') 133 | 134 | cls.original_env = { 135 | 'ENCRYPTION_IV': os.environ['ENCRYPTION_IV'], 136 | 'ENCRYPTION_KEY': os.environ['ENCRYPTION_KEY'], 137 | } 138 | 139 | def setUp(self): 140 | 141 | self.redis_dict = RedisDict() 142 | self.redis_dict.extends_type(EncryptedStringClassBased) 143 | 144 | def tearDown(self): 145 | self.redis_dict.clear() 146 | 147 | def helper_get_redis_internal_value(self, key): 148 | sep = ":" 149 | redis_dict = self.redis_dict 150 | 151 | stored_in_redis_as = redis_dict.redis.get(redis_dict._format_key(key)) 152 | internal_result_type, internal_result_value = stored_in_redis_as.split(sep, 1) 153 | return internal_result_type, internal_result_value 154 | 155 | def test_encrypted_string_encoding_and_decoding(self): 156 | 
"""Test adding new type and test if encoding and decoding works.""" 157 | redis_dict = self.redis_dict 158 | 159 | iv = base64.b64decode(os.environ['ENCRYPTION_IV']) 160 | key = base64.b64decode(os.environ['ENCRYPTION_KEY']) 161 | 162 | #encode_encrypted_function = encode_encrypted_string(iv, key, EncryptedStringClassBased.nonce) 163 | 164 | redis_dict.extends_type(EncryptedStringClassBased) 165 | key = "foo" 166 | expected_type = EncryptedStringClassBased.__name__ 167 | expected = "foobar" 168 | #encoded_expected = encode_encrypted_function(expected) 169 | 170 | redis_dict[key] = EncryptedStringClassBased(expected) 171 | 172 | # Should be stored encrypted 173 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 174 | self.assertNotEqual(internal_result_value, expected) 175 | 176 | self.assertEqual(internal_result_type, expected_type) 177 | #self.assertEqual(internal_result_value, encoded_expected) 178 | 179 | result = redis_dict[key] 180 | 181 | #self.assertNotEqual(encoded_expected, expected) 182 | self.assertIsInstance(result, EncryptedStringClassBased) 183 | self.assertEqual(result, expected) 184 | 185 | def test_encoding_decoding_should_remain_equal(self): 186 | """Test adding new type and test if encoding and decoding results in the same value""" 187 | redis_dict = self.redis_dict 188 | 189 | key = "foo" 190 | key2 = "bar" 191 | expected = "foobar" 192 | 193 | redis_dict[key] = EncryptedStringClassBased(expected) 194 | 195 | redis_dict[key2] = redis_dict[key] 196 | 197 | result_one = redis_dict[key] 198 | result_two = redis_dict[key2] 199 | 200 | self.assertEqual(result_one, EncryptedStringClassBased(expected)) 201 | self.assertEqual(result_one, result_two) 202 | 203 | self.assertEqual(result_one, expected) 204 | 205 | def test_values(self): 206 | """Test different values""" 207 | redis_dict = self.redis_dict 208 | expected_internal_type = EncryptedStringClassBased.__name__ 209 | test_cases = { 210 | "Empty string": "", 211 | "Single space": " ", 212 | "Multiple spaces": " ", 213 | "Various whitespace characters": "\t\n\r", 214 | "Single character": "a", 215 | "Two characters": "ab", 216 | "Three characters": "abc", 217 | "Normal string with punctuation": "Hello, World!", 218 | "Numeric string": "1234567890", 219 | "Special characters": "!@#$%^&*()_+-=[]{}|;:,.<>?", 220 | "Non-ASCII characters": "äöüßÄÖÜ", 221 | "Emoji": "😀🙈🚀", 222 | "Long string (1000 'a' characters)": "a" * 1000, 223 | "Very long text (Lorem ipsum)": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
" * 100, 224 | "JSON-like string": '{"key": "value"}', 225 | "HTML-like string": "Test", 226 | "SQL-like string": "SELECT * FROM users;", 227 | "URL-like string": "https://www.example.com/path?query=value", 228 | "String with null byte": "prefix\0suffix", 229 | "String with low ASCII characters": "\u0000\u0001\u0002\u0003", 230 | "String with high Unicode character (U+FFFF)": "\uFFFF", 231 | "Surrogate pair (Unicode smiley face)": "\uD83D\uDE00", 232 | "Mathematical script letters": "𝔘𝔫𝔦𝔠𝔬𝔡𝔢", 233 | "Chinese characters": "中文字符测试", 234 | "Japanese Hiragana": "こんにちは", 235 | "Korean Hangul": "한글 테스트", 236 | "String with right-to-left override character": "\u202Eexample", 237 | "String with escaped newlines and tabs": "\\n\\t\\r", 238 | "String with double quotes": "\"quoted\"", 239 | "String with single quotes": "'single quotes'", 240 | "String with backslash": "back\\slash", 241 | "Windows file path": "C:\\Program Files\\", 242 | "Unix file path": "/usr/local/bin/", 243 | "Decimal number string": "3.14159", 244 | "Negative decimal number string": "-273.15", 245 | "Scientific notation string": "1e10", 246 | "Not-a-Number (NaN) string": "NaN", 247 | "Infinity string": "Infinity", 248 | "String with byte values (0x00 and 0xFF)": "\x00\xFF", 249 | "Octal number string": "01234567", 250 | "Hexadecimal string": "0xDEADBEEF", 251 | "Null-like string": "null", 252 | "Undefined-like string": "undefined", 253 | "Boolean true string": "true", 254 | "Boolean false string": "false", 255 | "String with Python-like code": "import os\nos.system('echo Hello')", 256 | "String with a print statement": "print('Hello')", 257 | "Very long string (10,000 'a' characters)": "a" * 10000, 258 | } 259 | 260 | for test_num, (test_name, expected) in enumerate(test_cases.items()): 261 | key = f"test_{test_num+1}" 262 | redis_dict[key] = EncryptedStringClassBased(expected) 263 | result = redis_dict[key] 264 | # Assert result is same as the expected input value 265 | self.assertEqual(result, expected, f"testcase {test_num+1} failed {test_name}") 266 | 267 | # Assert that the value internally stored in Redis is encoded, and the type is correct. 268 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 269 | 270 | self.assertNotEqual(internal_result_value, expected, f"testcase {test_num+1} failed") 271 | self.assertNotEqual(internal_result_value, expected, f"testcase {test_num+1} failed") 272 | self.assertEqual(internal_result_type, expected_internal_type, f"testcase {test_num+1} failed") 273 | 274 | 275 | class EncryptedString(str): 276 | """ 277 | A class that behaves exactly like a string but has a distinct type for redis-dict encoding and decoding. 278 | 279 | This class inherits from the built-in str class, automatically providing all 280 | string functionality. The only difference is its class name, which allows for 281 | type checking of "encrypted" strings. Used to encode and decode for storage. 282 | 283 | Usage: 284 | >>> normal_string = "Hello, World!" 
285 | >>> encrypted_string = EncryptedString("Hello, World!") 286 | >>> assert normal_string == encrypted_string 287 | >>> assert type(encrypted_string) == EncryptedString 288 | >>> assert isinstance(encrypted_string, str) 289 | """ 290 | pass 291 | 292 | def encode(value: str, iv: bytes, key: bytes, nonce: bytes) -> str: 293 | cipher = Cipher(algorithms.AES(key), modes.GCM(nonce), backend=default_backend()) 294 | encryptor = cipher.encryptor() 295 | 296 | encrypted_data = encryptor.update(value.encode('utf-8', errors='surrogatepass')) + encryptor.finalize() 297 | return base64.b64encode(iv + nonce + encryptor.tag + encrypted_data).decode('utf-8') 298 | 299 | 300 | def decode(encrypted_value: str, iv: bytes, key: bytes, nonce: bytes) -> str: 301 | encrypted_data = base64.b64decode(encrypted_value) 302 | tag = encrypted_data[len(iv) + len(nonce):len(iv) + len(nonce) + 16] 303 | ciphertext = encrypted_data[len(iv) + len(nonce) + 16:] 304 | 305 | cipher = Cipher(algorithms.AES(key), modes.GCM(nonce, tag), backend=default_backend()) 306 | decryptor = cipher.decryptor() 307 | 308 | decrypted_data = decryptor.update(ciphertext) + decryptor.finalize() 309 | return decrypted_data.decode('utf-8', errors='surrogatepass') 310 | 311 | 312 | def encode_encrypted_string(iv, key, nonce): 313 | def encode_value(value): 314 | return encode(value, iv, key, nonce) 315 | return encode_value 316 | 317 | 318 | def decode_encrypted_string(iv, key, nonce): 319 | def decode_value(value): 320 | return EncryptedString(decode(value, iv, key, nonce)) 321 | return decode_value 322 | 323 | 324 | class TestRedisDictEncryption(unittest.TestCase): 325 | def setUp(self): 326 | self.redis_dict = RedisDict() 327 | 328 | iv = b"0123456789abcdef" # 16 bytes 329 | key = b"0123456789abcdef0123456789abcdef" # 32 bytes (256-bit key) 330 | nonce = b"0123456789abcdef" # 16 bytes 331 | 332 | encode_encrypted = encode_encrypted_string(iv, key, nonce) 333 | decode_encrypted = decode_encrypted_string(iv, key, nonce) 334 | 335 | self.redis_dict.extends_type( 336 | EncryptedString, 337 | encode_encrypted, 338 | decode_encrypted, 339 | ) 340 | 341 | def tearDown(self): 342 | self.redis_dict.clear() 343 | 344 | def helper_get_redis_internal_value(self, key): 345 | sep = ":" 346 | redis_dict = self.redis_dict 347 | 348 | stored_in_redis_as = redis_dict.redis.get(redis_dict._format_key(key)) 349 | internal_result_type, internal_result_value = stored_in_redis_as.split(sep, 1) 350 | return internal_result_type, internal_result_value 351 | 352 | def test_encrypted_string_encoding_and_decoding(self): 353 | """Test adding new type and test if encoding and decoding works.""" 354 | redis_dict = self.redis_dict 355 | 356 | iv = b"0123456789abcdef" # 16 bytes 357 | key = b"0123456789abcdef0123456789abcdef" # 32 bytes (256-bit key) 358 | nonce = b"0123456789abcdef" # 16 bytes 359 | 360 | encode_encrypted = encode_encrypted_string(iv, key, nonce) 361 | decode_encrypted = decode_encrypted_string(iv, key, nonce) 362 | 363 | redis_dict.extends_type(EncryptedString, 364 | encode_encrypted, 365 | decode_encrypted, 366 | ) 367 | key = "foo" 368 | expected_type = EncryptedString.__name__ 369 | expected = "foobar" 370 | encoded_expected = encode_encrypted(expected) 371 | 372 | redis_dict[key] = EncryptedString(expected) 373 | 374 | # Should be stored encrypted 375 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 376 | self.assertNotEqual(internal_result_value, expected) 377 | 378 | self.assertEqual(internal_result_type, 
expected_type) 379 | self.assertEqual(internal_result_value, encoded_expected) 380 | 381 | result = redis_dict[key] 382 | 383 | self.assertNotEqual(encoded_expected, expected) 384 | self.assertIsInstance(result, EncryptedString) 385 | self.assertEqual(result, expected) 386 | 387 | def test_encoding_decoding_should_remain_equal(self): 388 | """Test adding new type and test if encoding and decoding results in the same value""" 389 | redis_dict = self.redis_dict 390 | 391 | key = "foo" 392 | key2 = "bar" 393 | expected = "foobar" 394 | 395 | redis_dict[key] = EncryptedString(expected) 396 | 397 | redis_dict[key2] = redis_dict[key] 398 | 399 | result_one = redis_dict[key] 400 | result_two = redis_dict[key2] 401 | 402 | self.assertEqual(result_one, EncryptedString(expected)) 403 | self.assertEqual(result_one, result_two) 404 | 405 | self.assertEqual(result_one, expected) 406 | 407 | def test_values(self): 408 | """Test different values""" 409 | redis_dict = self.redis_dict 410 | expected_internal_type = EncryptedString.__name__ 411 | test_cases = { 412 | "Empty string": "", 413 | "Single space": " ", 414 | "Multiple spaces": " ", 415 | "Various whitespace characters": "\t\n\r", 416 | "Single character": "a", 417 | "Two characters": "ab", 418 | "Three characters": "abc", 419 | "Normal string with punctuation": "Hello, World!", 420 | "Numeric string": "1234567890", 421 | "Special characters": "!@#$%^&*()_+-=[]{}|;:,.<>?", 422 | "Non-ASCII characters": "äöüßÄÖÜ", 423 | "Emoji": "😀🙈🚀", 424 | "Long string (1000 'a' characters)": "a" * 1000, 425 | "Very long text (Lorem ipsum)": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " * 100, 426 | "JSON-like string": '{"key": "value"}', 427 | "HTML-like string": "Test", 428 | "SQL-like string": "SELECT * FROM users;", 429 | "URL-like string": "https://www.example.com/path?query=value", 430 | "String with null byte": "prefix\0suffix", 431 | "String with low ASCII characters": "\u0000\u0001\u0002\u0003", 432 | "String with high Unicode character (U+FFFF)": "\uFFFF", 433 | "Surrogate pair (Unicode smiley face)": "\uD83D\uDE00", 434 | "Mathematical script letters": "𝔘𝔫𝔦𝔠𝔬𝔡𝔢", 435 | "Chinese characters": "中文字符测试", 436 | "Japanese Hiragana": "こんにちは", 437 | "Korean Hangul": "한글 테스트", 438 | "String with right-to-left override character": "\u202Eexample", 439 | "String with escaped newlines and tabs": "\\n\\t\\r", 440 | "String with double quotes": "\"quoted\"", 441 | "String with single quotes": "'single quotes'", 442 | "String with backslash": "back\\slash", 443 | "Windows file path": "C:\\Program Files\\", 444 | "Unix file path": "/usr/local/bin/", 445 | "Decimal number string": "3.14159", 446 | "Negative decimal number string": "-273.15", 447 | "Scientific notation string": "1e10", 448 | "Not-a-Number (NaN) string": "NaN", 449 | "Infinity string": "Infinity", 450 | "String with byte values (0x00 and 0xFF)": "\x00\xFF", 451 | "Octal number string": "01234567", 452 | "Hexadecimal string": "0xDEADBEEF", 453 | "Null-like string": "null", 454 | "Undefined-like string": "undefined", 455 | "Boolean true string": "true", 456 | "Boolean false string": "false", 457 | "String with Python-like code": "import os\nos.system('echo Hello')", 458 | "String with a print statement": "print('Hello')", 459 | "Very long string (10,000 'a' characters)": "a" * 10000, 460 | } 461 | 462 | for test_num, (test_name, expected) in enumerate(test_cases.items()): 463 | key = f"test_{test_num+1}" 464 | redis_dict[key] = EncryptedString(expected) 465 | result = redis_dict[key] 466 | # 
Assert result is same as the expected input value 467 | self.assertEqual(result, expected, f"testcase {test_num + 1} failed {test_name}") 468 | 469 | # Assert that the value internally stored in Redis is encoded, and the type is correct. 470 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 471 | self.assertNotEqual(internal_result_value, expected, f"testcase {test_num+1} failed") 472 | self.assertNotEqual(internal_result_value, expected, f"testcase {test_num+1} failed") 473 | self.assertEqual(internal_result_type, expected_internal_type, f"testcase {test_num+1} failed") 474 | 475 | 476 | 477 | 478 | if __name__ == '__main__': 479 | unittest.main() 480 | -------------------------------------------------------------------------------- /tests/unit/tests_extend_types.py: -------------------------------------------------------------------------------- 1 | import json 2 | import gzip 3 | import time 4 | import base64 5 | import unittest 6 | 7 | from datetime import datetime 8 | 9 | from redis_dict import RedisDict 10 | 11 | 12 | class Customer: 13 | """ 14 | Smallest possible class to used showing the layout of a custom type class. 15 | Methods: 16 | encode 17 | decode 18 | """ 19 | def __init__(self, name, age, address): 20 | self.name = name 21 | self.age = age 22 | self.address = address 23 | 24 | def encode(self) -> str: 25 | return json.dumps(self.__dict__) 26 | 27 | @classmethod 28 | def decode(cls, encoded_str: str) -> 'Customer': 29 | return cls(**json.loads(encoded_str)) 30 | 31 | 32 | class BaseRedisDictTest(unittest.TestCase): 33 | def setUp(self): 34 | self.redis_dict = RedisDict() 35 | self.redis_dict_seperator = ":" 36 | 37 | def tearDown(self): 38 | self.redis_dict.clear() 39 | new_types = [ 40 | 'Customer', 41 | 'GzippedDict', 42 | 'Person', 43 | 'CompressedString', 44 | 'EncryptedStringClassBased', 45 | 'EncryptedRot13String', 46 | 'EncryptedString', 47 | ] 48 | for extend_type in new_types: 49 | self.redis_dict.encoding_registry.pop(extend_type, None) 50 | self.redis_dict.decoding_registry.pop(extend_type, None) 51 | 52 | def helper_get_redis_internal_value(self, key): 53 | sep = self.redis_dict_seperator 54 | redis_dict = self.redis_dict 55 | 56 | stored_in_redis_as = redis_dict.redis.get(redis_dict._format_key(key)) 57 | internal_result_type, internal_result_value = stored_in_redis_as.split(sep, 1) 58 | return internal_result_type, internal_result_value 59 | 60 | 61 | class TestRedisDictExtendTypesDefault(BaseRedisDictTest): 62 | 63 | def test_customer_encoding_and_decoding(self): 64 | """Test adding Customer type and test if encoding and decoding works.""" 65 | redis_dict = self.redis_dict 66 | redis_dict.extends_type(Customer) 67 | key = "customer1" 68 | expected_type = Customer.__name__ 69 | expected_customer = Customer("John Doe1", 31, "1234 Main St") 70 | 71 | redis_dict[key] = expected_customer 72 | 73 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 74 | 75 | self.assertEqual(internal_result_type, expected_type) 76 | self.assertEqual(json.loads(internal_result_value), expected_customer.__dict__) 77 | 78 | result = redis_dict[key] 79 | 80 | self.assertIsInstance(result, Customer) 81 | self.assertEqual(result.address, expected_customer.address) 82 | 83 | def test_customer_encoding_decoding_should_remain_equal(self): 84 | """Test adding Customer type and test if encoding and decoding results in the same value""" 85 | redis_dict = self.redis_dict 86 | redis_dict.extends_type(Customer) 87 | 88 
| key1 = "customer1" 89 | key2 = "customer2" 90 | expected_customer = Customer("Jane Smith1", 27, "4567 Elm St") 91 | 92 | redis_dict[key1] = expected_customer 93 | redis_dict[key2] = redis_dict[key1] 94 | 95 | result_one = redis_dict[key1] 96 | result_two = redis_dict[key2] 97 | self.assertEqual(result_one.name, result_two.name) 98 | 99 | self.assertEqual(result_two.name, expected_customer.name) 100 | self.assertEqual(result_two.age, expected_customer.age) 101 | self.assertEqual(result_two.address, expected_customer.address) 102 | 103 | 104 | class Person: 105 | def __init__(self, name, age, address): 106 | self.name = name 107 | self.age = age 108 | self.address = address 109 | 110 | def __eq__(self, other): 111 | if not isinstance(other, Person): 112 | return False 113 | return self.__dict__ == other.__dict__ 114 | 115 | def __repr__(self): 116 | return f"Person(name='{self.name}', age={self.age}, address='{self.address}')" 117 | 118 | 119 | def person_encode(obj): 120 | return json.dumps(obj.__dict__) 121 | 122 | 123 | def person_decode(json_str): 124 | return Person(**json.loads(json_str)) 125 | 126 | 127 | class TestRedisDictExtendTypesEncodeDecodeFunctionsProvided(BaseRedisDictTest): 128 | 129 | def test_person_encoding_and_decoding(self): 130 | """Test adding Person type and test if encoding and decoding works.""" 131 | redis_dict = self.redis_dict 132 | redis_dict.extends_type(Person, person_encode, person_decode) 133 | key = "person1" 134 | expected_type = Person.__name__ 135 | expected_person = Person("John Doe", 30, "123 Main St") 136 | 137 | redis_dict[key] = expected_person 138 | 139 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 140 | 141 | self.assertEqual(internal_result_type, expected_type) 142 | self.assertEqual(json.loads(internal_result_value), expected_person.__dict__) 143 | 144 | result = redis_dict[key] 145 | 146 | self.assertIsInstance(result, Person) 147 | self.assertEqual(result, expected_person) 148 | 149 | def test_person_encoding_decoding_should_remain_equal(self): 150 | """Test adding Person type and test if encoding and decoding results in the same value""" 151 | redis_dict = self.redis_dict 152 | redis_dict.extends_type(Person, person_encode, person_decode) 153 | 154 | key1 = "person1" 155 | key2 = "person2" 156 | expected_person = Person("Jane Smith", 25, "456 Elm St") 157 | 158 | redis_dict[key1] = expected_person 159 | redis_dict[key2] = redis_dict[key1] 160 | 161 | result_one = redis_dict[key1] 162 | result_two = redis_dict[key2] 163 | 164 | self.assertEqual(result_one, expected_person) 165 | self.assertEqual(result_one, result_two) 166 | 167 | self.assertEqual(result_two.name, expected_person.name) 168 | self.assertEqual(result_two.age, expected_person.age) 169 | self.assertEqual(result_two.address, expected_person.address) 170 | 171 | 172 | class EncryptedRot13String(str): 173 | """ 174 | A class that behaves exactly like a string but has a distinct type for redis-dict encoding and decoding. 175 | 176 | This class will allow for encoding and decoding with ROT13. Demonstrating the serialization. 177 | https://en.wikipedia.org/wiki/ROT13 178 | 179 | This class inherits from the built-in str class, automatically providing all 180 | string functionality. The only difference is its class name, which allows for 181 | type checking of "EncryptedRot13String" strings. Used to encode and decode for storage. 182 | 183 | Usage: 184 | >>> normal_string = "Hello, World!" 
185 | >>> encrypted_string = EncryptedRot13String("Hello, World!") 186 | >>> assert normal_string == encrypted_string 187 | >>> assert type(encrypted_string) == EncryptedRot13String 188 | >>> assert isinstance(encrypted_string, str) 189 | """ 190 | pass 191 | 192 | 193 | def rot13(s): 194 | """ 195 | Applies the ROT13 substitution cipher to the input string. 196 | 197 | ROT13 is a simple letter substitution cipher that replaces a letter with 198 | the 13th letter after it in the alphabet. It's often described as the 199 | "bubble sort" of encryption due to its simplicity and reversibility. 200 | 201 | This implementation is for testing purposes and should not be used 202 | for actual data encryption. 203 | 204 | Example: 205 | >>> rot13("Hello, World!") 206 | "uryyb, jbeyq!" 207 | >>> rot13("uryyb, jbeyq!") 208 | "hello, world!" 209 | 210 | For more information: 211 | https://en.wikipedia.org/wiki/ROT13 212 | """ 213 | return ''.join([chr((ord(c) - 97 + 13) % 26 + 97) if c.isalpha() else c for c in s.lower()]) 214 | 215 | 216 | def rot13_encode(encrypted_rot13_str: EncryptedRot13String): 217 | """ 218 | Example of encoding function for redis-dict extended types. 219 | 220 | Encodes an EncryptedRot13String to encoded string for storage on redis. 221 | 222 | Converts the input to a string using ROT13 encoding. 223 | 224 | Args: 225 | encrypted_rot13_str (Any): The EncryptedRot13String to be encoded. 226 | 227 | Returns: 228 | str: The ROT13 encoded string representation of the input. 229 | 230 | """ 231 | return rot13(encrypted_rot13_str) 232 | 233 | 234 | def rot13_decode(encoded_string: str) -> 'EncryptedRot13String': 235 | """ 236 | Example of decoding function for redis-dict extended types. 237 | 238 | Decodes a ROT13 encoded string back to an EncryptedRot13String object. 239 | 240 | It converts a ROT13 encoded string back to an EncryptedRot13String object. 241 | 242 | Args: 243 | encoded_string (str): The ROT13 encoded string to be decoded. 244 | 245 | Returns: 246 | EncryptedRot13String: An instance of EncryptedRot13String containing the decoded string. 
247 | """ 248 | decoded_string = rot13(encoded_string) 249 | return EncryptedRot13String(decoded_string) 250 | 251 | 252 | class TestRedisDictExtendTypesEncodingDecoding(BaseRedisDictTest): 253 | 254 | def test_encrypted_string_encoding_and_decoding(self): 255 | """Test adding new type and test if encoding and decoding works.""" 256 | redis_dict = self.redis_dict 257 | redis_dict.extends_type(EncryptedRot13String, rot13_encode, rot13_decode) 258 | key = "foo" 259 | expected_type = EncryptedRot13String.__name__ 260 | expected = "foobar" 261 | encoded_expected = rot13(expected) 262 | 263 | # Store string that should be encoded using Rot13 264 | redis_dict[key] = EncryptedRot13String(expected) 265 | 266 | # Assert the stored string is correctly stored encoded with Rot13 267 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 268 | 269 | self.assertNotEqual(internal_result_value, expected) 270 | self.assertEqual(internal_result_type, expected_type) 271 | self.assertEqual(internal_result_value, encoded_expected) 272 | 273 | # Assert the result from getting the value is decoding correctly 274 | result = redis_dict[key] 275 | self.assertNotEqual(encoded_expected, expected) 276 | self.assertIsInstance(result, EncryptedRot13String) 277 | self.assertEqual(result, expected) 278 | 279 | def test_encoding_decoding_should_remain_equal(self): 280 | """Test adding new type and test if encoding and decoding results in the same value""" 281 | redis_dict = RedisDict() 282 | redis_dict.extends_type(EncryptedRot13String, rot13_encode, rot13_decode) 283 | 284 | key = "foo" 285 | key2 = "bar" 286 | expected = "foobar" 287 | 288 | redis_dict[key] = EncryptedRot13String(expected) 289 | 290 | # Decodes the value, And stores the value encoded. Seamless usage of new type. 291 | redis_dict[key2] = redis_dict[key] 292 | 293 | result_one = redis_dict[key] 294 | result_two = redis_dict[key2] 295 | 296 | # Assert the single encoded decoded value is the same as double encoding decoded value. 
297 | self.assertEqual(result_one, EncryptedRot13String(expected)) 298 | self.assertEqual(result_one, result_two) 299 | 300 | self.assertEqual(result_two, expected) 301 | 302 | 303 | class TestRedisDictExtendTypesFuncsAndMethods(BaseRedisDictTest): 304 | 305 | def test_datetime_encoding_and_decoding_with_method_names(self): 306 | """Test extending RedisDict with datetime using method names.""" 307 | redis_dict = self.redis_dict 308 | redis_dict.extends_type(datetime, encoding_method_name="isoformat", decoding_method_name="fromisoformat") 309 | key = "now" 310 | expected = datetime(2024, 10, 15, 12, 35, 43, 842438) 311 | expected_type = datetime.__name__ 312 | 313 | # Store datetime 314 | redis_dict[key] = expected 315 | 316 | # Assert the stored datetime is correctly encoded 317 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 318 | 319 | self.assertEqual(internal_result_type, expected_type) 320 | self.assertEqual(internal_result_value, expected.isoformat()) 321 | 322 | # Assert the result from getting the value is decoding correctly 323 | result = redis_dict[key] 324 | self.assertIsInstance(result, datetime) 325 | self.assertEqual(result, expected) 326 | 327 | def test_datetime_encoding_and_decoding_with_functions(self): 328 | """Test extending RedisDict with datetime using explicit functions.""" 329 | redis_dict = self.redis_dict 330 | redis_dict.extends_type(datetime, datetime.isoformat, datetime.fromisoformat) 331 | key = "now" 332 | expected = datetime(2024, 10, 14, 18, 41, 53, 493775) 333 | expected_type = datetime.__name__ 334 | 335 | # Store datetime 336 | redis_dict[key] = expected 337 | 338 | # Assert the stored datetime is correctly encoded 339 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 340 | 341 | self.assertEqual(internal_result_type, expected_type) 342 | self.assertEqual(internal_result_value, expected.isoformat()) 343 | 344 | # Assert the result from getting the value is decoding correctly 345 | result = redis_dict[key] 346 | self.assertIsInstance(result, datetime) 347 | self.assertEqual(result, expected) 348 | 349 | # Assert the RedisDict representation 350 | self.assertEqual(str(redis_dict), str({key: expected})) 351 | 352 | 353 | class GzippedDict: 354 | """ 355 | A class that can encode its attributes to a compressed string and decode from a compressed string, 356 | optimized for the fastest possible gzipping. 357 | 358 | Methods: 359 | encode: Compresses and encodes the object's attributes to a base64 string using the fastest settings. 360 | decode: Creates a new object from a compressed and encoded base64 string. 361 | """ 362 | 363 | def __init__(self, name, age, address): 364 | self.name = name 365 | self.age = age 366 | self.address = address 367 | 368 | def encode(self) -> str: 369 | """ 370 | Encodes the object's attributes to a compressed base64 string using the fastest possible settings. 371 | 372 | Returns: 373 | str: A base64 encoded string of the compressed object attributes. 374 | """ 375 | json_data = json.dumps(self.__dict__, separators=(',', ':')) 376 | compressed_data = gzip.compress(json_data.encode('utf-8'), compresslevel=1) 377 | return base64.b64encode(compressed_data).decode('ascii') 378 | 379 | @classmethod 380 | def decode(cls, encoded_str: str) -> 'GzippedDict': 381 | """ 382 | Creates a new object from a compressed and encoded base64 string. 383 | 384 | Args: 385 | encoded_str (str): A base64 encoded string of compressed object attributes. 
386 | 387 | Returns: 388 | GzippedDict: A new instance of the class with decoded attributes. 389 | """ 390 | json_data = gzip.decompress(base64.b64decode(encoded_str)).decode('utf-8') 391 | attributes = json.loads(json_data) 392 | return cls(**attributes) 393 | 394 | 395 | class TestRedisDictExtendTypesGzipped(BaseRedisDictTest): 396 | 397 | def test_gzipped_dict_encoding_and_decoding(self): 398 | """Test adding new type and test if encoding and decoding works.""" 399 | self.redis_dict.extends_type(GzippedDict) 400 | 401 | redis_dict = self.redis_dict 402 | key = "person" 403 | expected = GzippedDict("John Doe", 30, "123 Main St, Anytown, USA 12345") 404 | expected_type = GzippedDict.__name__ 405 | 406 | # Store GzippedDict that should be encoded 407 | redis_dict[key] = expected 408 | 409 | # Assert the stored value is correctly encoded 410 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 411 | 412 | self.assertNotEqual(internal_result_value, expected.__dict__) 413 | self.assertEqual(internal_result_type, expected_type) 414 | self.assertIsInstance(internal_result_value, str) 415 | 416 | # Assert the result from getting the value is decoding correctly 417 | result = redis_dict[key] 418 | self.assertIsInstance(result, GzippedDict) 419 | self.assertDictEqual(result.__dict__, expected.__dict__) 420 | 421 | def test_encoding_decoding_should_remain_equal(self): 422 | """Test adding new type and test if encoding and decoding results in the same value""" 423 | redis_dict = self.redis_dict 424 | self.redis_dict.extends_type(GzippedDict) 425 | 426 | key = "person1" 427 | key2 = "person2" 428 | expected = GzippedDict("Jane Doe", 28, "456 Elm St, Othertown, USA 67890") 429 | 430 | redis_dict[key] = expected 431 | 432 | # Decodes the value, And stores the value encoded. Seamless usage of new type. 433 | redis_dict[key2] = redis_dict[key] 434 | 435 | result_one = redis_dict[key] 436 | result_two = redis_dict[key2] 437 | 438 | # Assert the single encoded decoded value is the same as double encoding decoded value. 439 | self.assertDictEqual(result_one.__dict__, expected.__dict__) 440 | self.assertDictEqual(result_one.__dict__, result_two.__dict__) 441 | self.assertEqual(result_one.name, expected.name) 442 | 443 | 444 | class CompressedString(str): 445 | """ 446 | A string subclass that provides methods for encoding (compressing) and decoding (decompressing) its content. 447 | 448 | Methods: 449 | encode: Compresses the string content and returns a base64 encoded string. 450 | decode: Creates a new CompressedString instance from a compressed and encoded base64 string. 451 | """ 452 | 453 | def compress(self) -> str: 454 | """ 455 | Compresses the string content and returns a base64 encoded string. 456 | 457 | Returns: 458 | str: A base64 encoded string of the compressed content. 459 | """ 460 | compressed_data = gzip.compress(self.encode('utf-8'), compresslevel=1) 461 | return base64.b64encode(compressed_data).decode('ascii') 462 | 463 | @classmethod 464 | def decompress(cls, compressed_str: str) -> 'CompressedString': 465 | """ 466 | Creates a new CompressedString instance from a compressed and encoded base64 string. 467 | 468 | Args: 469 | compressed_str (str): A base64 encoded string of compressed content. 470 | 471 | Returns: 472 | CompressedString: A new instance of the class with decompressed content. 
473 | """ 474 | decompressed_data = gzip.decompress(base64.b64decode(compressed_str)).decode('utf-8') 475 | return cls(decompressed_data) 476 | 477 | 478 | class TestRedisDictExtendTypesCompressed(BaseRedisDictTest): 479 | 480 | def test_compressed_string_encoding_and_decoding(self): 481 | """Test adding new type and test if encoding and decoding works.""" 482 | redis_dict = self.redis_dict 483 | redis_dict.extends_type(CompressedString,encoding_method_name='compress', decoding_method_name='decompress') 484 | key = "message" 485 | expected = CompressedString("This is a test message that will be compressed and stored in Redis.") 486 | expected_type = CompressedString.__name__ 487 | 488 | # Store CompressedString that should be encoded 489 | redis_dict[key] = expected 490 | 491 | # Assert the stored value is correctly encoded 492 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 493 | 494 | self.assertNotEqual(internal_result_value, expected) 495 | self.assertEqual(internal_result_type, expected_type) 496 | self.assertIsInstance(internal_result_value, str) 497 | 498 | # Assert the result from getting the value is decoding correctly 499 | result = redis_dict[key] 500 | self.assertIsInstance(result, CompressedString) 501 | self.assertEqual(result, expected) 502 | 503 | def test_encoding_decoding_should_remain_equal(self): 504 | """Test adding new type and test if encoding and decoding results in the same value""" 505 | redis_dict = self.redis_dict 506 | redis_dict.extends_type(CompressedString, encoding_method_name='compress', decoding_method_name='decompress') 507 | 508 | key = "message1" 509 | key2 = "message2" 510 | expected = CompressedString("Another test message to ensure consistent encoding and decoding.") 511 | 512 | redis_dict[key] = expected 513 | 514 | # Decodes the value, And stores the value encoded. Seamless usage of new type. 515 | redis_dict[key2] = redis_dict[key] 516 | 517 | result_one = redis_dict[key] 518 | result_two = redis_dict[key2] 519 | 520 | # Assert the single encoded decoded value is the same as double encoding decoded value. 521 | self.assertEqual(result_one, expected) 522 | self.assertEqual(result_one, result_two) 523 | self.assertEqual(result_one[:10], expected[:10]) 524 | 525 | def test_compression_size_reduction(self): 526 | """Test that compression significantly reduces the size of stored data""" 527 | redis_dict = self.redis_dict 528 | redis_dict.extends_type(CompressedString, encoding_method_name='compress', decoding_method_name='decompress') 529 | key = "large_message" 530 | 531 | # Create a large string with some repetitive content to ensure good compression 532 | large_string = "This is a test message. " * 1000 + "Some unique content to mix things up." 
533 | expected = CompressedString(large_string) 534 | 535 | # Store the large CompressedString 536 | redis_dict[key] = expected 537 | 538 | # Get the internal (compressed) value 539 | internal_result_type, internal_result_value = self.helper_get_redis_internal_value(key) 540 | 541 | # Calculate sizes 542 | original_size = len(large_string) 543 | compressed_size = len(internal_result_value) 544 | 545 | # Print sizes for information (optional) 546 | print(f"Original size: {original_size} bytes") 547 | print(f"Compressed size: {compressed_size} bytes") 548 | print(f"Compression ratio: {compressed_size / original_size:.2f}") 549 | 550 | # Assert that compression achieved significant size reduction 551 | self.assertLess(compressed_size, original_size * 0.5, "Compression should reduce size by at least 50%") 552 | 553 | # Verify that we can still recover the original string 554 | decoded = redis_dict[key] 555 | self.assertEqual(decoded, expected) 556 | self.assertEqual(len(decoded), original_size) 557 | 558 | def test_compression_timing_comparison(self): 559 | """Compare timing of operations between compressed and uncompressed strings""" 560 | redis_dict = self.redis_dict # A new instance for regular strings 561 | 562 | key_compressed = "compressed" 563 | key = "regular" 564 | 565 | # Create a large string with some repetitive content 566 | large_string = "This is a test message. " * 1000 + "Some unique content to mix things up." 567 | compressed_string = CompressedString(large_string) 568 | 569 | # Timing for setting compressed string 570 | start_time = time.time() 571 | redis_dict[key_compressed] = compressed_string 572 | compressed_set_time = time.time() - start_time 573 | 574 | # Timing for setting regular string 575 | start_time = time.time() 576 | redis_dict[key] = large_string 577 | regular_set_time = time.time() - start_time 578 | 579 | # Timing for getting compressed string 580 | start_time = time.time() 581 | _ = redis_dict[key_compressed] 582 | compressed_get_time = time.time() - start_time 583 | 584 | # Timing for getting regular string 585 | start_time = time.time() 586 | _ = redis_dict[key] 587 | regular_get_time = time.time() - start_time 588 | 589 | # Print timing results 590 | print(f"Compressed string set time: {compressed_set_time:.6f} seconds") 591 | print(f"Regular string set time: {regular_set_time:.6f} seconds") 592 | print(f"Compressed string get time: {compressed_get_time:.6f} seconds") 593 | print(f"Regular string get time: {regular_get_time:.6f} seconds") 594 | 595 | 596 | class TestNewTypeComplianceFailures(BaseRedisDictTest): 597 | def test_missing_encode_method(self): 598 | class MissingEncodeMethod: 599 | @classmethod 600 | def decode(cls, value): 601 | pass 602 | 603 | with self.assertRaises(NotImplementedError) as context: 604 | self.redis_dict.new_type_compliance(MissingEncodeMethod, encode_method_name="encode", 605 | decode_method_name="decode") 606 | 607 | self.assertTrue( 608 | "Class MissingEncodeMethod does not implement the required encode method" in str(context.exception)) 609 | 610 | def test_missing_decode_method(self): 611 | class MissingDecodeMethod: 612 | @classmethod 613 | def encode(cls): 614 | pass 615 | 616 | with self.assertRaises(NotImplementedError) as context: 617 | self.redis_dict.new_type_compliance(MissingDecodeMethod, encode_method_name="encode", 618 | decode_method_name="decode") 619 | 620 | self.assertTrue( 621 | "Class MissingDecodeMethod does not implement the required decode class method" in str(context.exception)) 622 | 623 | def 
test_non_callable_encode_method(self): 624 | class NonCallableEncodeMethod: 625 | encode = "not a method" 626 | 627 | @classmethod 628 | def decode(cls, value): 629 | pass 630 | 631 | with self.assertRaises(NotImplementedError) as context: 632 | self.redis_dict.new_type_compliance(NonCallableEncodeMethod, encode_method_name="encode", 633 | decode_method_name="decode") 634 | 635 | self.assertTrue( 636 | "Class NonCallableEncodeMethod does not implement the required encode method" in str(context.exception)) 637 | 638 | def test_non_callable_decode_method(self): 639 | class NonCallableDecodeMethod: 640 | @classmethod 641 | def encode(cls): 642 | pass 643 | 644 | decode = "not a method" 645 | 646 | with self.assertRaises(NotImplementedError) as context: 647 | self.redis_dict.new_type_compliance(NonCallableDecodeMethod, encode_method_name="encode", 648 | decode_method_name="decode") 649 | 650 | self.assertTrue("Class NonCallableDecodeMethod does not implement the required decode class method" in str( 651 | context.exception)) 652 | 653 | if __name__ == '__main__': 654 | unittest.main() 655 | -------------------------------------------------------------------------------- /src/redis_dict/core.py: -------------------------------------------------------------------------------- 1 | """Redis Dict module.""" 2 | from typing import Any, Dict, Iterator, List, Tuple, Union, Optional, Type 3 | 4 | from datetime import timedelta 5 | from contextlib import contextmanager 6 | from collections.abc import Mapping 7 | 8 | from redis import StrictRedis 9 | 10 | from .type_management import SENTINEL, EncodeFuncType, DecodeFuncType, EncodeType, DecodeType 11 | from .type_management import _create_default_encode, _create_default_decode, _default_decoder 12 | from .type_management import encoding_registry as enc_reg 13 | from .type_management import decoding_registry as dec_reg 14 | 15 | 16 | # pylint: disable=R0902, R0904 17 | class RedisDict: 18 | """Python dictionary with Redis as backend. 19 | 20 | With support for advanced features, such as custom data types, pipelining, and key expiration. 21 | 22 | This class provides a dictionary-like interface that interacts with a Redis database, allowing 23 | for efficient storage and retrieval of key-value pairs. It supports various data types, including 24 | strings, integers, floats, lists, dictionaries, tuples, sets, and user-defined types. The class 25 | leverages the power of Redis pipelining to minimize network round-trip time, latency, and I/O load, 26 | thereby optimizing performance for batch operations. Additionally, it allows for the management of 27 | key expiration through the use of context managers. 28 | 29 | The RedisDict class is designed to be analogous to a standard Python dictionary while providing 30 | enhanced functionality, such as support for a wider range of data types and efficient batch operations. 31 | It aims to offer a seamless and familiar interface for developers familiar with Python dictionaries, 32 | enabling a smooth transition to a Redis-backed data store. 33 | 34 | Extendable Types: You can extend RedisDict by adding or overriding encoding and decoding functions. 35 | This functionality enables various use cases, such as managing encrypted data in Redis, 36 | To implement this, simply create and register your custom encoding and decoding functions. 37 | By delegating serialization to redis-dict, reduce complexity and have simple code in the codebase. 
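
    Example:
        Minimal usage, assuming a reachable Redis server with default connection settings:

        >>> dic = RedisDict(namespace="example")
        >>> dic["foo"] = "bar"
        >>> dic["foo"]
        'bar'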
38 | """ 39 | 40 | encoding_registry: EncodeType = enc_reg 41 | decoding_registry: DecodeType = dec_reg 42 | 43 | # pylint: disable=R0913 44 | def __init__(self, 45 | namespace: str = 'main', 46 | expire: Union[int, timedelta, None] = None, 47 | preserve_expiration: Optional[bool] = False, 48 | redis: "Optional[StrictRedis[Any]]" = None, 49 | raise_key_error_delete: bool = False, 50 | **redis_kwargs: Any) -> None: # noqa: D202:R0913 pydocstyle clashes with Sphinx 51 | """ 52 | Initialize a RedisDict instance. 53 | 54 | Init the RedisDict instance. 55 | 56 | Args: 57 | namespace (str): A prefix for keys stored in Redis. 58 | expire (Union[int, timedelta, None], optional): Expiration time for keys. 59 | preserve_expiration (Optional[bool], optional): Preserve expiration on key updates. 60 | redis (Optional[StrictRedis[Any]], optional): A Redis connection instance. 61 | raise_key_error_delete (bool): Enable strict Python dict behavior raise if key not found when deleting. 62 | **redis_kwargs (Any): Additional kwargs for Redis connection if not provided. 63 | """ 64 | 65 | self.namespace: str = namespace 66 | self.expire: Union[int, timedelta, None] = expire 67 | self.preserve_expiration: Optional[bool] = preserve_expiration 68 | self.raise_key_error_delete: bool = raise_key_error_delete 69 | if redis: 70 | redis.connection_pool.connection_kwargs["decode_responses"] = True 71 | 72 | self.redis: StrictRedis[Any] = redis or StrictRedis(decode_responses=True, **redis_kwargs) 73 | self.get_redis: StrictRedis[Any] = self.redis 74 | 75 | self.custom_encode_method = "encode" 76 | self.custom_decode_method = "decode" 77 | 78 | self._iter: Iterator[str] = iter([]) 79 | self._max_string_size: int = 500 * 1024 * 1024 # 500mb 80 | self._temp_redis: Optional[StrictRedis[Any]] = None 81 | self._insertion_order_key = f"redis-dict-insertion-order-{namespace}" 82 | self._batch_size: int = 200 83 | 84 | def _format_key(self, key: str) -> str: 85 | """ 86 | Format a key with the namespace prefix. 87 | 88 | Args: 89 | key (str): The key to be formatted. 90 | 91 | Returns: 92 | str: The formatted key with the namespace prefix. 93 | """ 94 | return f'{self.namespace}:{key}' 95 | 96 | def _parse_key(self, key: str) -> str: 97 | """ 98 | Parse a formatted key with the namespace prefix and type. 99 | 100 | Args: 101 | key (str): The key to be parsed to type. 102 | 103 | Returns: 104 | str: The parsed key 105 | """ 106 | return key[len(self.namespace) + 1:] 107 | 108 | def _valid_input(self, value: Any) -> bool: 109 | """ 110 | Check if the input value is valid based on the specified value type. 111 | 112 | This method ensures that the input value is within the acceptable constraints for the given 113 | value type. For example, when the value type is "str", the method checks that the string 114 | length does not exceed the maximum allowed size (500 MB). 115 | 116 | Args: 117 | value (Any): The input value to be validated. 118 | 119 | Returns: 120 | bool: True if the input value is valid, False otherwise. 121 | """ 122 | store_type = type(value).__name__ 123 | if store_type == "str": 124 | return len(value) < self._max_string_size 125 | return True 126 | 127 | def _format_value(self, value: Any) -> str: 128 | """Format a valid value with the type and encoded representation of the value. 129 | 130 | Args: 131 | value (Any): The value to be encoded and formatted. 132 | 133 | Returns: 134 | str: The formatted value with the type and encoded representation of the value. 
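
        Example:
            Illustrative output, assuming the default encoder for int values keeps their string form:

            >>> self._format_value(1)
            'int:1'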
135 | """ 136 | store_type = type(value).__name__ 137 | encoded_value = self.encoding_registry.get(store_type, lambda x: x)(value) # type: ignore 138 | return f'{store_type}:{encoded_value}' 139 | 140 | def _store_set(self, formatted_key: str, formatted_value: str) -> None: 141 | if self.preserve_expiration and self.get_redis.exists(formatted_key): 142 | self.redis.set(formatted_key, formatted_value, keepttl=True) 143 | else: 144 | self.redis.set(formatted_key, formatted_value, ex=self.expire) 145 | 146 | def _store(self, key: str, value: Any) -> None: 147 | """ 148 | Store a value in Redis with the given key. 149 | 150 | Args: 151 | key (str): The key to store the value. 152 | value (Any): The value to be stored. 153 | 154 | Raises: 155 | ValueError: If the value or key fail validation. 156 | 157 | Note: Validity checks could be refactored to allow for custom exceptions that inherit from ValueError, 158 | providing detailed information about why a specific validation failed. 159 | This would enable users to specify which validity checks should be executed, add custom validity functions, 160 | and choose whether to fail on validation errors, or drop the data and only issue a warning and continue. 161 | Example use case is caching, to cache data only when it's between min and max sizes. 162 | Allowing for simple dict set operation, but only cache data that makes sense. 163 | 164 | """ 165 | if not self._valid_input(value) or not self._valid_input(key): 166 | raise ValueError("Invalid input value or key size exceeded the maximum limit.") 167 | 168 | formatted_key = self._format_key(key) 169 | formatted_value = self._format_value(value) 170 | 171 | self._store_set(formatted_key, formatted_value) 172 | 173 | def _load(self, key: str) -> Tuple[bool, Any]: 174 | """ 175 | Load a value from Redis with the given key. 176 | 177 | Args: 178 | key (str): The key to retrieve the value. 179 | 180 | Returns: 181 | tuple: A tuple containing a boolean indicating whether the value was found and the value itself. 182 | """ 183 | result = self.get_redis.get(self._format_key(key)) 184 | if result is None: 185 | return False, None 186 | return True, self._transform(result) 187 | 188 | def _transform(self, result: str) -> Any: 189 | """ 190 | Transform the result string from Redis into the appropriate Python object. 191 | 192 | Args: 193 | result (str): The result string from Redis. 194 | 195 | Returns: 196 | Any: The transformed Python object. 197 | """ 198 | type_, value = result.split(':', 1) 199 | return self.decoding_registry.get(type_, _default_decoder)(value) 200 | 201 | def new_type_compliance( 202 | self, 203 | class_type: type, 204 | encode_method_name: Optional[str] = None, 205 | decode_method_name: Optional[str] = None, 206 | ) -> None: 207 | """Check if a class complies with the required encoding and decoding methods. 208 | 209 | Args: 210 | class_type (type): The class to check for compliance. 211 | encode_method_name (str, optional): Name of encoding method of the class for redis-dict custom types. 212 | decode_method_name (str, optional): Name of decoding method of the class for redis-dict custom types. 213 | 214 | Raises: 215 | NotImplementedError: If the class does not implement the required methods when the respective check is True. 
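
        Example:
            Illustrative check against a class that defines both methods, such as the Person class shown in `extends_type`:

            >>> redis_dict.new_type_compliance(Person, encode_method_name="encode", decode_method_name="decode")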
216 | """ 217 | if encode_method_name is not None: 218 | if not (hasattr(class_type, encode_method_name) and callable( 219 | getattr(class_type, encode_method_name))): 220 | raise NotImplementedError( 221 | f"Class {class_type.__name__} does not implement the required {encode_method_name} method.") 222 | 223 | if decode_method_name is not None: 224 | if not (hasattr(class_type, decode_method_name) and callable( 225 | getattr(class_type, decode_method_name))): 226 | raise NotImplementedError( 227 | f"Class {class_type.__name__} does not implement the required {decode_method_name} class method.") 228 | 229 | # pylint: disable=too-many-arguments 230 | def extends_type( 231 | self, 232 | class_type: type, 233 | encode: Optional[EncodeFuncType] = None, 234 | decode: Optional[DecodeFuncType] = None, 235 | encoding_method_name: Optional[str] = None, 236 | decoding_method_name: Optional[str] = None, 237 | ) -> None: # noqa: D202 pydocstyle clashes with Sphinx 238 | """ 239 | Extend RedisDict to support a custom type in the encode/decode mapping. 240 | 241 | This method enables serialization of instances based on their type, 242 | allowing for custom types, specialized storage formats, and more. 243 | There are three ways to add custom types: 244 | 1. Have a class with an `encode` instance method and a `decode` class method. 245 | 2. Have a class and pass encoding and decoding functions, where 246 | `encode` converts the class instance to a string, and 247 | `decode` takes the string and recreates the class instance. 248 | 3. Have a class that already has serialization methods, that satisfies the: 249 | EncodeFuncType = Callable[[Any], str] 250 | DecodeFuncType = Callable[[str], Any] 251 | 252 | `custom_encode_method` 253 | `custom_decode_method` 254 | 255 | If no encoding or decoding function is provided, default to use the `encode` and `decode` methods of the class. 256 | 257 | The `encode` method should be an instance method that converts the object to a string. 258 | The `decode` method should be a class method that takes a string and returns an instance of the class. 259 | 260 | The method names for encoding and decoding can be changed by modifying the 261 | - `custom_encode_method` 262 | - `custom_decode_method` 263 | attributes of the RedisDict instance 264 | 265 | Example: 266 | >>> class Person: 267 | ... def __init__(self, name, age): 268 | ... self.name = name 269 | ... self.age = age 270 | ... 271 | ... def encode(self) -> str: 272 | ... return json.dumps(self.__dict__) 273 | ... 274 | ... @classmethod 275 | ... def decode(cls, encoded_str: str) -> 'Person': 276 | ... return cls(**json.loads(encoded_str)) 277 | ... 278 | >>> redis_dict.extends_type(Person) 279 | 280 | Args: 281 | class_type (type): The class `__name__` will become the key for the encoding and decoding functions. 282 | encode (Optional[EncodeFuncType]): function that encodes an object into a storable string format. 283 | decode (Optional[DecodeFuncType]): function that decodes a string back into an object of `class_type`. 284 | encoding_method_name (str, optional): Name of encoding method of the class for redis-dict custom types. 285 | decoding_method_name (str, optional): Name of decoding method of the class for redis-dict custom types. 
286 | 287 | Raises: 288 | NotImplementedError 289 | 290 | Note: 291 | You can check for compliance of a class separately using the `new_type_compliance` method: 292 | 293 | This method raises a NotImplementedError if either `encode` or `decode` is `None` 294 | and the class does not implement the corresponding method. 295 | """ 296 | 297 | if encode is None or decode is None: 298 | encode_method_name = encoding_method_name or self.custom_encode_method 299 | if encode is None: 300 | self.new_type_compliance(class_type, encode_method_name=encode_method_name) 301 | encode = _create_default_encode(encode_method_name) 302 | 303 | if decode is None: 304 | decode_method_name = decoding_method_name or self.custom_decode_method 305 | self.new_type_compliance(class_type, decode_method_name=decode_method_name) 306 | decode = _create_default_decode(class_type, decode_method_name) 307 | 308 | type_name = class_type.__name__ 309 | self.decoding_registry[type_name] = decode 310 | self.encoding_registry[type_name] = encode 311 | 312 | def __eq__(self, other: Any) -> bool: 313 | """ 314 | Compare the current RedisDict with another object. 315 | 316 | Args: 317 | other (Any): The object to compare with. 318 | 319 | Returns: 320 | bool: True if equal, False otherwise 321 | """ 322 | if len(self) != len(other): 323 | return False 324 | for key, value in self.items(): 325 | if value != other.get(key, SENTINEL): 326 | return False 327 | return True 328 | 329 | def __ne__(self, other: Any) -> bool: 330 | """ 331 | Compare the current RedisDict with another object. 332 | 333 | Args: 334 | other (Any): The object to compare with. 335 | 336 | Returns: 337 | bool: False if equal, True otherwise 338 | """ 339 | return not self.__eq__(other) 340 | 341 | def __getitem__(self, item: str) -> Any: 342 | """ 343 | Get the value associated with the given key, analogous to a dictionary. 344 | 345 | Args: 346 | item (str): The key to retrieve the value. 347 | 348 | Returns: 349 | Any: The value associated with the key. 350 | 351 | Raises: 352 | KeyError: If the key is not found. 353 | """ 354 | found, value = self._load(item) 355 | if not found: 356 | raise KeyError(item) 357 | return value 358 | 359 | def __setitem__(self, key: str, value: Any) -> None: 360 | """ 361 | Set the value associated with the given key, analogous to a dictionary. 362 | 363 | Args: 364 | key (str): The key to store the value. 365 | value (Any): The value to be stored. 366 | """ 367 | self._store(key, value) 368 | 369 | def __delitem__(self, key: str) -> None: 370 | """ 371 | Delete the value associated with the given key, analogous to a dictionary. 372 | 373 | For distributed systems, we intentionally don't raise KeyError when the key doesn't exist. 374 | This ensures identical code running across different systems won't randomly fail 375 | when another system already achieved the deletion goal (key not existing). 376 | 377 | Warning: 378 | Setting dict_compliant=True will raise KeyError when key doesn't exist. 379 | This is not recommended for distributed systems as it can cause KeyErrors 380 | that are hard to debug when multiple systems interact with the same keys. 
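
        Example:
            With the default settings, deleting a missing key does not raise (illustrative):

            >>> del d["does-not-exist"]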
381 | 382 | Args: 383 | key (str): The key to delete 384 | 385 | Raises: 386 | KeyError: Only if dict_compliant=True and key doesn't exist 387 | """ 388 | formatted_key = self._format_key(key) 389 | result = self.redis.delete(formatted_key) 390 | if self.raise_key_error_delete and not result: 391 | raise KeyError(key) 392 | 393 | def __contains__(self, key: str) -> bool: 394 | """ 395 | Check if the given key exists in the RedisDict, analogous to a dictionary. 396 | 397 | Args: 398 | key (str): The key to check for existence. 399 | 400 | Returns: 401 | bool: True if the key exists, False otherwise. 402 | """ 403 | return self._load(key)[0] 404 | 405 | def __len__(self) -> int: 406 | """ 407 | Get the number of items in the RedisDict, analogous to a dictionary. 408 | 409 | Returns: 410 | int: The number of items in the RedisDict. 411 | """ 412 | return sum(1 for _ in self._scan_keys(full_scan=True)) 413 | 414 | def __iter__(self) -> Iterator[str]: 415 | """ 416 | Return an iterator over the keys of the RedisDict, analogous to a dictionary. 417 | 418 | Returns: 419 | Iterator[str]: An iterator over the keys of the RedisDict. 420 | """ 421 | self._iter = self.keys() 422 | return self 423 | 424 | def __repr__(self) -> str: 425 | """ 426 | Create a string representation of the RedisDict. 427 | 428 | Returns: 429 | str: A string representation of the RedisDict. 430 | """ 431 | return str(self) 432 | 433 | def __str__(self) -> str: 434 | """ 435 | Create a string representation of the RedisDict. 436 | 437 | Returns: 438 | str: A string representation of the RedisDict. 439 | """ 440 | return str(self.to_dict()) 441 | 442 | def __or__(self, other: Dict[str, Any]) -> Dict[str, Any]: 443 | """Implement the | operator (dict union). 444 | 445 | Returns a new dictionary with items from both dictionaries. 446 | 447 | Args: 448 | other (Dict[str, Any]): The dictionary to merge with. 449 | 450 | Raises: 451 | TypeError: If other does not adhere to Mapping. 452 | 453 | Returns: 454 | Dict[str, Any]: A new dictionary containing items from both dictionaries. 455 | """ 456 | if not isinstance(other, Mapping): 457 | raise TypeError(f"unsupported operand type(s) for |: '{type(other).__name__}' and 'RedisDict'") 458 | 459 | result = {} 460 | result.update(self.to_dict()) 461 | result.update(other) 462 | return result 463 | 464 | def __ror__(self, other: Dict[str, Any]) -> Dict[str, Any]: 465 | """ 466 | Implement the reverse | operator. 467 | 468 | Called when RedisDict is on the right side of |. 469 | 470 | Args: 471 | other (Dict[str, Any]): The dictionary to merge with. 472 | 473 | Raises: 474 | TypeError: If other does not adhere to Mapping. 475 | 476 | Returns: 477 | Dict[str, Any]: A new dictionary containing items from both dictionaries. 478 | """ 479 | if not isinstance(other, Mapping): 480 | raise TypeError(f"unsupported operand type(s) for |: 'RedisDict' and '{type(other).__name__}'") 481 | 482 | result = {} 483 | result.update(other) 484 | result.update(self.to_dict()) 485 | return result 486 | 487 | def __ior__(self, other: Dict[str, Any]) -> 'RedisDict': 488 | """ 489 | Implement the |= operator (in-place union). 490 | 491 | Modifies the current dictionary by adding items from other. 492 | 493 | Args: 494 | other (Dict[str, Any]): The dictionary to merge with. 495 | 496 | Raises: 497 | TypeError: If other does not adhere to Mapping. 498 | 499 | Returns: 500 | RedisDict: The modified RedisDict instance. 
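Example (an illustrative sketch of the three union forms; namespace and keys are arbitrary):

>>> d = RedisDict(namespace='example')
>>> d.update({'a': 1})
>>> d | {'b': 2}   # plain dict with items from both
{'a': 1, 'b': 2}
>>> {'b': 2} | d   # reverse form; RedisDict values win on overlapping keys
{'b': 2, 'a': 1}
>>> d |= {'c': 3}  # in-place form: writes 'c' to Redis and returns the RedisDict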
501 | """ 502 | if not isinstance(other, Mapping): 503 | raise TypeError(f"unsupported operand type(s) for |: '{type(other).__name__}' and 'RedisDict'") 504 | 505 | self.update(other) 506 | return self 507 | 508 | @classmethod 509 | def __class_getitem__(cls: Type['RedisDict'], _key: Any) -> Type['RedisDict']: 510 | """ 511 | Enable type hinting support like RedisDict[str, Any]. 512 | 513 | Args: 514 | _key (Any): The type parameter(s) used in the type hint. 515 | 516 | Returns: 517 | Type[RedisDict]: The class itself, enabling type hint usage. 518 | """ 519 | return cls 520 | 521 | def __reversed__(self) -> Iterator[str]: 522 | """ 523 | Implement reversed() built-in. 524 | 525 | Returns an iterator over dictionary keys in reverse insertion order. 526 | 527 | Warning: 528 | RedisDict currently does not preserve insertion order, so the keys are not yielded in a meaningful reverse insertion order. 529 | 530 | Returns: 531 | Iterator[str]: An iterator yielding the dictionary keys in reverse order. 532 | """ 533 | return reversed(list(self.keys())) 534 | 535 | def __next__(self) -> str: 536 | """ 537 | Get the next item in the iterator. 538 | 539 | Returns: 540 | str: The next item in the iterator. 541 | """ 542 | return next(self._iter) 543 | 544 | def next(self) -> str: 545 | """ 546 | Get the next item in the iterator (alias for __next__). 547 | 548 | Returns: 549 | str: The next item in the iterator. 550 | 551 | """ 552 | return next(self) 553 | 554 | def _create_iter_query(self, search_term: str) -> str: 555 | """ 556 | Create a Redis query string for iterating over keys based on the given search term. 557 | 558 | This method constructs a query by prefixing the search term with the namespace 559 | followed by a wildcard to facilitate scanning for keys that start with the 560 | provided search term. 561 | 562 | Args: 563 | search_term (str): The term to search for in Redis keys. 564 | 565 | Returns: 566 | str: A formatted query string that can be used to find keys in Redis. 567 | 568 | Example: 569 | >>> d = RedisDict(namespace='foo') 570 | >>> query = d._create_iter_query('bar') 571 | >>> query 572 | 'foo:bar*' 573 | """ 574 | return f'{self.namespace}:{search_term}*' 575 | 576 | def _scan_keys(self, search_term: str = '', full_scan: bool = False) -> Iterator[str]: 577 | """Scan for Redis keys matching the given search term. 578 | 579 | Args: 580 | search_term (str): A search term to filter keys. Defaults to ''. 581 | full_scan (bool): When True, scan without a COUNT hint; otherwise scan in batches of self._batch_size (200 by default). 582 | 583 | Returns: 584 | Iterator[str]: An iterator of matching Redis keys. 585 | """ 586 | search_query = self._create_iter_query(search_term) 587 | count = None if full_scan else self._batch_size 588 | return self.get_redis.scan_iter(match=search_query, count=count) 589 | 590 | def get(self, key: str, default: Optional[Any] = None) -> Any: 591 | """Return the value for the given key if it exists, otherwise return the default value. 592 | 593 | Analogous to a dictionary's get method. 594 | 595 | Args: 596 | key (str): The key to retrieve the value. 597 | default (Optional[Any], optional): The value to return if the key is not found. 598 | 599 | Returns: 600 | Any: The value associated with the key or the default value. 601 | """ 602 | found, item = self._load(key) 603 | if not found: 604 | return default 605 | return item 606 | 607 | def keys(self) -> Iterator[str]: 608 | """Return an Iterator of keys in the RedisDict, analogous to a dictionary's keys method.
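Keys are produced lazily from a Redis SCAN with the namespace prefix stripped, so iteration stays memory friendly for large namespaces. A brief sketch (namespace and key are arbitrary):

>>> d = RedisDict(namespace='example')
>>> d['a'] = 1
>>> list(d.keys())
['a']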
609 | 610 | Returns: 611 | Iterator[str]: A list of keys in the RedisDict. 612 | """ 613 | to_rm = len(self.namespace) + 1 614 | return (str(item[to_rm:]) for item in self._scan_keys()) 615 | 616 | def key(self, search_term: str = '') -> Optional[str]: 617 | """Return the first value for search_term if it exists, otherwise return None. 618 | 619 | Args: 620 | search_term (str): A search term to filter keys. Defaults to ''. 621 | 622 | Returns: 623 | str: The first key associated with the given search term. 624 | """ 625 | to_rm = len(self.namespace) + 1 626 | search_query = self._create_iter_query(search_term) 627 | _, data = self.get_redis.scan(match=search_query, count=1) 628 | for item in data: 629 | return str(item[to_rm:]) 630 | 631 | return None 632 | 633 | def items(self) -> Iterator[Tuple[str, Any]]: 634 | """Return a list of key-value pairs (tuples) in the RedisDict, analogous to a dictionary's items method. 635 | 636 | Yields: 637 | Iterator[Tuple[str, Any]]: A list of key-value pairs in the RedisDict. 638 | """ 639 | to_rm = len(self.namespace) + 1 640 | for item in self._scan_keys(): 641 | try: 642 | yield str(item[to_rm:]), self[item[to_rm:]] 643 | except KeyError: 644 | pass 645 | 646 | def values(self) -> Iterator[Any]: 647 | """Analogous to a dictionary's values method. 648 | 649 | Return a list of values in the RedisDict, 650 | 651 | Yields: 652 | List[Any]: A list of values in the RedisDict. 653 | """ 654 | to_rm = len(self.namespace) + 1 655 | for item in self._scan_keys(): 656 | try: 657 | yield self[item[to_rm:]] 658 | except KeyError: 659 | pass 660 | 661 | def to_dict(self) -> Dict[str, Any]: 662 | """Convert the RedisDict to a Python dictionary. 663 | 664 | Returns: 665 | Dict[str, Any]: A dictionary with the same key-value pairs as the RedisDict. 666 | """ 667 | return dict(self.items()) 668 | 669 | def clear(self) -> None: 670 | """Remove all key-value pairs from the RedisDict in one batch operation using pipelining. 671 | 672 | This method mimics the behavior of the `clear` method from a standard Python dictionary. 673 | Redis pipelining is employed to group multiple commands into a single request, minimizing 674 | network round-trip time, latency, and I/O load, thereby enhancing the overall performance. 675 | 676 | """ 677 | with self.pipeline(): 678 | for key in self._scan_keys(full_scan=True): 679 | self.redis.delete(key) 680 | 681 | def _pop(self, formatted_key: str) -> Any: 682 | """ 683 | Remove the value associated with the given key and return it. 684 | 685 | Or return the default value if the key is not found. 686 | 687 | Args: 688 | formatted_key (str): The formatted key to remove the value. 689 | 690 | Returns: 691 | Any: The value associated with the key or the default value. 692 | """ 693 | return self.get_redis.execute_command("GETDEL", formatted_key) 694 | 695 | def pop(self, key: str, default: Union[Any, object] = SENTINEL) -> Any: 696 | """Analogous to a dictionary's pop method. 697 | 698 | Remove the value associated with the given key and return it, or return the default value 699 | if the key is not found. 700 | 701 | Args: 702 | key (str): The key to remove the value. 703 | default (Optional[Any], optional): The value to return if the key is not found. 704 | 705 | Returns: 706 | Any: The value associated with the key or the default value. 707 | 708 | Raises: 709 | KeyError: If the key is not found and no default value is provided. 
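Example (an illustrative sketch; note that the KeyError carries the namespaced key):

>>> d = RedisDict(namespace='example')
>>> d['token'] = 'abc'
>>> d.pop('token')
'abc'
>>> d.pop('token', 'fallback')
'fallback'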
710 | """ 711 | formatted_key = self._format_key(key) 712 | value = self._pop(formatted_key) 713 | if value is None: 714 | if default is not SENTINEL: 715 | return default 716 | raise KeyError(formatted_key) 717 | return self._transform(value) 718 | 719 | def popitem(self) -> Tuple[str, Any]: 720 | """Remove and return a random (key, value) pair from the RedisDict as a tuple. 721 | 722 | This method is analogous to the `popitem` method of a standard Python dictionary. 723 | 724 | If dict_compliant is set to True, this stays true to Python 3.7+ behaviour and removes the last inserted item (LIFO order). 725 | 726 | Returns: 727 | tuple: A tuple containing a randomly chosen (key, value) pair. 728 | 729 | Raises: 730 | KeyError: If RedisDict is empty. 731 | """ 732 | while True: 733 | key = self.key() 734 | if key is None: 735 | raise KeyError("popitem(): dictionary is empty") 736 | try: 737 | return key, self.pop(key) 738 | except KeyError: 739 | continue 740 | 741 | def _create_set_get_command(self, formatted_key: str, formatted_value: str) -> Tuple[List[str], Dict[str, bool]]: 742 | """Create SET command arguments and options for Redis, used by the setdefault operation. 743 | 744 | Args: 745 | formatted_key (str): The formatted Redis key. 746 | formatted_value (str): The formatted value to be set. 747 | 748 | Returns: 749 | Tuple[List[str], Dict[str, bool]]: A tuple containing the command arguments and options. 750 | """ 751 | # Setting {"get": True} makes the redis client parse the result as a "GET" reply instead of a "SET" reply 752 | options = {"get": True} 753 | args = ["SET", formatted_key, formatted_value, "NX", "GET"] 754 | if self.preserve_expiration: 755 | args.append("KEEPTTL") 756 | elif self.expire is not None: 757 | expire_val = int(self.expire.total_seconds()) if isinstance(self.expire, timedelta) else self.expire 758 | expire_str = str(1) if expire_val <= 1 else str(expire_val) 759 | args.extend(["EX", expire_str]) 760 | return args, options 761 | 762 | def setdefault(self, key: str, default_value: Optional[Any] = None) -> Any: 763 | """Get value under key, and if not present set default value. 764 | 765 | Return the value associated with the given key if it exists, otherwise set the value to the 766 | default value and return it. Analogous to a dictionary's setdefault method. 767 | 768 | Args: 769 | key (str): The key to retrieve the value. 770 | default_value (Optional[Any], optional): The value to set if the key is not found. 771 | 772 | Returns: 773 | Any: The value associated with the key or the default value. 774 | """ 775 | formatted_key = self._format_key(key) 776 | formatted_value = self._format_value(default_value) 777 | 778 | args, options = self._create_set_get_command(formatted_key, formatted_value) 779 | result = self.get_redis.execute_command(*args, **options) 780 | 781 | if result is None: 782 | return default_value 783 | 784 | return self._transform(result) 785 | 786 | def copy(self) -> Dict[str, Any]: 787 | """Create a shallow copy of the RedisDict and return it as a standard Python dictionary. 788 | 789 | This method is analogous to the `copy` method of a standard Python dictionary. 790 | 791 | Returns: 792 | dict: A shallow copy of the RedisDict as a standard Python dictionary. 793 | 794 | Note: 795 | Does not create a new RedisDict instance; the copy is returned as a regular dict. 796 | """ 797 | return self.to_dict() 798 | 799 | def update(self, dic: Dict[str, Any]) -> None: 800 | """ 801 | Update the RedisDict with key-value pairs from the given mapping, analogous to a dictionary's update method.
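All writes are buffered through a single pipeline (see the pipeline context manager below), so updating many keys avoids one network round trip per key. A brief sketch:

>>> d = RedisDict(namespace='example')
>>> d.update({'a': 1, 'b': 2})
>>> d['b']
2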
802 | 803 | Args: 804 | dic (Mapping[str, Any]): A mapping containing key-value pairs to update the RedisDict. 805 | """ 806 | with self.pipeline(): 807 | for key, value in dic.items(): 808 | self[key] = value 809 | 810 | def fromkeys(self, iterable: List[str], value: Optional[Any] = None) -> 'RedisDict': 811 | """Create a new RedisDict from an iterable of key-value pairs. 812 | 813 | Create a new RedisDict with keys from the provided iterable and values set to the given value. 814 | This method is analogous to the `fromkeys` method of a standard Python dictionary, populating 815 | the RedisDict with the keys from the iterable and setting their corresponding values to the 816 | specified value. 817 | 818 | 819 | Args: 820 | iterable (List[str]): An iterable containing the keys to be added to the RedisDict. 821 | value (Optional[Any], optional): The value to be assigned to each key in the RedisDict. Defaults to None. 822 | 823 | Returns: 824 | RedisDict: The current RedisDict instance,populated with the keys from the iterable and their 825 | corresponding values. 826 | """ 827 | for key in iterable: 828 | self[key] = value 829 | return self 830 | 831 | def __sizeof__(self) -> int: 832 | """Return the approximate size of the RedisDict in memory, in bytes. 833 | 834 | This method is analogous to the `__sizeof__` method of a standard Python dictionary, estimating 835 | the memory consumption of the RedisDict based on the serialized in-memory representation. 836 | Should be changed to redis view of the size. 837 | 838 | Returns: 839 | int: The approximate size of the RedisDict in memory, in bytes. 840 | """ 841 | return self.to_dict().__sizeof__() 842 | 843 | def chain_set(self, iterable: List[str], v: Any) -> None: 844 | """ 845 | Set a value in the RedisDict using a chain of keys. 846 | 847 | Args: 848 | iterable (List[str]): A list of keys representing the chain. 849 | v (Any): The value to be set. 850 | """ 851 | self[':'.join(iterable)] = v 852 | 853 | def chain_get(self, iterable: List[str]) -> Any: 854 | """ 855 | Get a value from the RedisDict using a chain of keys. 856 | 857 | Args: 858 | iterable (List[str]): A list of keys representing the chain. 859 | 860 | Returns: 861 | Any: The value associated with the chain of keys. 862 | """ 863 | return self[':'.join(iterable)] 864 | 865 | def chain_del(self, iterable: List[str]) -> None: 866 | """ 867 | Delete a value from the RedisDict using a chain of keys. 868 | 869 | Args: 870 | iterable (List[str]): A list of keys representing the chain. 871 | """ 872 | del self[':'.join(iterable)] 873 | 874 | # def expire_at(self, sec_epoch: int | timedelta) -> Iterator[None]: 875 | # compatibility with Python 3.9 typing 876 | @contextmanager 877 | def expire_at(self, sec_epoch: Union[int, timedelta]) -> Iterator[None]: 878 | """Context manager to set the expiration time for keys in the RedisDict. 879 | 880 | Args: 881 | sec_epoch (int, timedelta): The expiration duration is set using either an integer or a timedelta. 882 | 883 | Yields: 884 | ContextManager: A context manager during which the expiration time is the time set. 885 | """ 886 | self.expire, temp = sec_epoch, self.expire 887 | yield 888 | self.expire = temp 889 | 890 | @contextmanager 891 | def pipeline(self) -> Iterator[None]: 892 | """ 893 | Context manager to create a Redis pipeline for batch operations. 894 | 895 | Yields: 896 | ContextManager: A context manager to create a Redis pipeline batching all operations within the context. 
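Example (an illustrative sketch; commands are buffered and sent when the block exits, so code inside the block should not rely on reading back values it has just written):

>>> d = RedisDict(namespace='example')
>>> with d.pipeline():
...     for i in range(100):
...         d[f'key_{i}'] = i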
897 | """ 898 | top_level = False 899 | if self._temp_redis is None: 900 | self.redis, self._temp_redis, top_level = self.redis.pipeline(), self.redis, True 901 | try: 902 | yield 903 | finally: 904 | if top_level: 905 | _, self._temp_redis, self.redis = self.redis.execute(), None, self._temp_redis # type: ignore 906 | 907 | def multi_get(self, key: str) -> List[Any]: 908 | """ 909 | Get multiple values from the RedisDict using a shared key prefix. 910 | 911 | Args: 912 | key (str): The shared key prefix. 913 | 914 | Returns: 915 | List[Any]: A list of values associated with the key prefix. 916 | """ 917 | found_keys = list(self._scan_keys(key)) 918 | if len(found_keys) == 0: 919 | return [] 920 | return [self._transform(i) for i in self.redis.mget(found_keys) if i is not None] 921 | 922 | def multi_chain_get(self, keys: List[str]) -> List[Any]: 923 | """ 924 | Get multiple values from the RedisDict using a chain of keys. 925 | 926 | Args: 927 | keys (List[str]): A list of keys representing the chain. 928 | 929 | Returns: 930 | List[Any]: A list of values associated with the chain of keys. 931 | """ 932 | return self.multi_get(':'.join(keys)) 933 | 934 | def multi_dict(self, key: str) -> Dict[str, Any]: 935 | """ 936 | Get a dictionary of key-value pairs from the RedisDict using a shared key prefix. 937 | 938 | Args: 939 | key (str): The shared key prefix. 940 | 941 | Returns: 942 | Dict[str, Any]: A dictionary of key-value pairs associated with the key prefix. 943 | """ 944 | keys = list(self._scan_keys(key)) 945 | if len(keys) == 0: 946 | return {} 947 | to_rm = keys[0].rfind(':') + 1 948 | return dict( 949 | zip([i[to_rm:] for i in keys], (self._transform(i) for i in self.redis.mget(keys) if i is not None)) 950 | ) 951 | 952 | def multi_del(self, key: str) -> int: 953 | """ 954 | Delete multiple values from the RedisDict using a shared key prefix. 955 | 956 | Args: 957 | key (str): The shared key prefix. 958 | 959 | Returns: 960 | int: The number of keys deleted. 961 | """ 962 | keys = list(self._scan_keys(key)) 963 | if len(keys) == 0: 964 | return 0 965 | return self.redis.delete(*keys) 966 | 967 | def get_redis_info(self) -> Dict[str, Any]: 968 | """ 969 | Retrieve information and statistics about the Redis server. 970 | 971 | Returns: 972 | dict: The information and statistics from the Redis server in a dictionary. 973 | """ 974 | return dict(self.redis.info()) 975 | 976 | def get_ttl(self, key: str) -> Optional[int]: 977 | """Get the Time To Live from Redis. 978 | 979 | Get the Time To Live (TTL) in seconds for a given key. If the key does not exist or does not have an 980 | associated `expire`, return None. 981 | 982 | Args: 983 | key (str): The key for which to get the TTL. 984 | 985 | Returns: 986 | Optional[int]: The TTL in seconds if the key exists and has an expiry set; otherwise, None. 987 | """ 988 | val = self.redis.ttl(self._format_key(key)) 989 | if val < 0: 990 | return None 991 | return val 992 | --------------------------------------------------------------------------------
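As a closing illustration of the TTL and multi-key helpers above, a small usage sketch (the namespace, keys, and values are assumptions for demonstration only):

>>> from datetime import timedelta
>>> from redis_dict import RedisDict
>>> d = RedisDict(namespace='example')
>>> with d.expire_at(timedelta(minutes=5)):
...     d['session:1'] = 'alice'
>>> d.get_ttl('session:1')  # remaining TTL in seconds, roughly 300
300
>>> d.multi_get('session')  # all values stored under the 'session' prefix
['alice']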