├── tests
│   ├── types
│   │   ├── __init__.py
│   │   ├── helpers.py
│   │   ├── test_base.py
│   │   ├── test_rediscache.py
│   │   └── test_queue.py
│   ├── __init__.py
│   └── test_session.py
├── async_rediscache
│   ├── py.typed
│   ├── redis_scripts
│   │   ├── __init__.py
│   │   ├── rediscache_pop.lua
│   │   ├── redisqueue_reschedule_task.lua
│   │   ├── redisqueue_reschedule_all_client_tasks.lua
│   │   └── rediscache_increment.lua
│   ├── types
│   │   ├── __init__.py
│   │   ├── cache.py
│   │   ├── queue.py
│   │   └── base.py
│   ├── __init__.py
│   └── session.py
├── .gitattributes
├── .gitignore
├── MANIFEST.in
├── pyproject.toml
├── tox.ini
├── Pipfile
├── .github
│   └── workflows
│       ├── release.yaml
│       ├── test_release.yaml
│       └── lint_test.yaml
├── setup.py
├── README.md
└── Pipfile.lock
/tests/types/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/async_rediscache/py.typed:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto
2 | 
--------------------------------------------------------------------------------
/async_rediscache/redis_scripts/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .git
2 | .idea
3 | build/
4 | dist/
5 | *.egg-info
6 | .coverage
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 | 
3 | 
4 | logging.disable(logging.CRITICAL)
5 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-exclude tests *
2 | recursive-include async_rediscache/redis_scripts *.lua
3 | 
--------------------------------------------------------------------------------
/async_rediscache/types/__init__.py:
--------------------------------------------------------------------------------
1 | from .base import *
2 | from .cache import *
3 | from .queue import *
--------------------------------------------------------------------------------
/async_rediscache/__init__.py:
--------------------------------------------------------------------------------
1 | """Make package-level imports possible by importing the __all__ names from modules."""
2 | 
3 | from .session import *
4 | from .types import *
--------------------------------------------------------------------------------
/async_rediscache/redis_scripts/rediscache_pop.lua:
--------------------------------------------------------------------------------
1 | local popped_value = redis.call('HGET', KEYS[1], KEYS[2])
2 | redis.call('HDEL', KEYS[1], KEYS[2])
3 | return popped_value
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools >= 40.6.0", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 | 
5 | [tool.coverage.run]
6 | branch = true
7 | source = [
8 |     "async_rediscache",
9 |     "tests",
10 | ]
11 | 
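The `rediscache_pop.lua` script shown above runs its `HGET` and `HDEL` as a single atomic server-side step, so no other client can observe the hash field between the read and the delete. A minimal, self-contained sketch of how such a script can be loaded and invoked through redis-py's asyncio client — the names here are illustrative and not part of the package, and it assumes `fakeredis[lua]` is installed so it runs without a server:

```python
import asyncio

import fakeredis.aioredis

# Same logic as redis_scripts/rediscache_pop.lua: read a hash field and
# delete it in one atomic script invocation.
POP_SCRIPT = """
local popped_value = redis.call('HGET', KEYS[1], KEYS[2])
redis.call('HDEL', KEYS[1], KEYS[2])
return popped_value
"""


async def main() -> None:
    # fakeredis stands in for a real server; redis.asyncio.Redis would work too.
    client = fakeredis.aioredis.FakeRedis(decode_responses=True)
    await client.hset("demo_namespace", "greeting", "s|hello")

    # SCRIPT LOAD caches the script and returns its SHA1; EVALSHA executes it.
    sha = await client.script_load(POP_SCRIPT)
    popped = await client.evalsha(sha, 2, "demo_namespace", "greeting")

    print(popped)                                           # s|hello
    print(await client.hget("demo_namespace", "greeting"))  # None: field is gone


asyncio.run(main())
```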
-------------------------------------------------------------------------------- /async_rediscache/redis_scripts/redisqueue_reschedule_task.lua: -------------------------------------------------------------------------------- 1 | local number_rescheduled = redis.call("LREM", KEYS[2], 1, ARGV[1]) 2 | if number_rescheduled == 0 then 3 | return redis.error_reply("Task not found in pending tasks queue.") 4 | end 5 | 6 | redis.call("LPUSH", KEYS[1], ARGV[1]) 7 | -------------------------------------------------------------------------------- /async_rediscache/redis_scripts/redisqueue_reschedule_all_client_tasks.lua: -------------------------------------------------------------------------------- 1 | local queue_size = redis.call("LLEN", KEYS[2]) 2 | local client_tasks = redis.call("LRANGE", KEYS[2], 0, queue_size) 3 | 4 | -- For Lua version compatibility, try picking the right version of unpack 5 | local unpack_function 6 | if unpack == nil then 7 | unpack_function = table.unpack 8 | else 9 | unpack_function = unpack 10 | end 11 | 12 | if queue_size > 0 then 13 | redis.call("RPUSH", KEYS[1], unpack_function(client_tasks)) 14 | end 15 | 16 | redis.call("DEL", KEYS[2]) 17 | return queue_size 18 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length=100 3 | docstring-convention=all 4 | import-order-style=pycharm 5 | application_import_names=async_rediscache,tests 6 | exclude=.cache,.venv,.git 7 | ignore= 8 | B311,W503,E226,S311,T000 9 | # Missing Docstrings 10 | D100,D104,D105,D107, 11 | # Docstring Whitespace 12 | D203,D212,D214,D215, 13 | # Docstring Quotes 14 | D301,D302, 15 | # Docstring Content 16 | D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417 17 | # Type Annotations 18 | ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401 19 | per-file-ignores= 20 | tests/*:D,ANN,N802, 21 | */__init__.py:F 22 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | flake8 = "~=6.1.0" 8 | flake8-annotations = "~=3.0.1" 9 | flake8-bugbear = "~=23.9.16" 10 | flake8-docstrings = "~=1.7.0" 11 | flake8-import-order = "~=0.18.2" 12 | flake8-string-format = "~=0.3.0" 13 | flake8-tidy-imports = "~=4.10.0" 14 | flake8-todo = "~=0.7" 15 | pep8-naming = "~=0.13.3" 16 | "coverage[toml]" = "~=6.5.0" 17 | coveralls = "~=3.3.1" 18 | time-machine = "~=2.13.0" 19 | 20 | [packages] 21 | redis = "~=5.0" 22 | "fakeredis[lua]" = "~=2.0" 23 | 24 | [requires] 25 | python_version = "3.8" 26 | 27 | [scripts] 28 | lint = "python -m flake8" 29 | tests = "python -m unittest" 30 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release to PyPI 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | 8 | jobs: 9 | build: 10 | name: Build dist & publish 11 | runs-on: ubuntu-latest 12 | if: ${{ !contains(github.ref, 'dev') && !contains(github.ref, 'rc') }} 13 | 14 | steps: 15 | - name: Checkout the repo and the submodules. 
16 | uses: actions/checkout@v4 17 | 18 | - name: Set up Python 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: '3.8' 22 | 23 | - name: Install build dependencies & build 24 | run: | 25 | python -m pip install --upgrade pip 26 | pip install setuptools wheel 27 | python setup.py sdist bdist_wheel 28 | 29 | - name: Publish to PyPI 30 | uses: pypa/gh-action-pypi-publish@v1.3.1 31 | with: 32 | user: __token__ 33 | password: ${{ secrets.PYPI_TOKEN }} 34 | -------------------------------------------------------------------------------- /tests/types/helpers.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import unittest.mock 3 | 4 | import fakeredis.aioredis 5 | 6 | from async_rediscache.session import RedisSession 7 | 8 | 9 | class BaseRedisObjectTests(unittest.IsolatedAsyncioTestCase): 10 | """Base class for Redis data type test classes.""" 11 | 12 | async def asyncSetUp(self): 13 | """Patch the RedisSession to pass in a fresh fakeredis client for each test.""" 14 | self.patcher = unittest.mock.patch("async_rediscache.types.base.RedisSession") 15 | self.mock_session: RedisSession = self.patcher.start() 16 | self.mock_session.get_current_session.return_value = self.mock_session 17 | self.mock_session.client = fakeredis.aioredis.FakeRedis() # noqa 18 | 19 | # Flush everything from the database to prevent carry-overs between tests 20 | await self.mock_session.client.flushall() 21 | 22 | async def asyncTearDown(self): 23 | self.patcher.stop() 24 | -------------------------------------------------------------------------------- /.github/workflows/test_release.yaml: -------------------------------------------------------------------------------- 1 | name: Test Release to test.pypi.org 2 | 3 | on: 4 | push: 5 | tags: 6 | - '**dev**' 7 | - '**rc**' 8 | 9 | jobs: 10 | build: 11 | name: Build dist & publish to test.pypi.org 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Checkout the repo and the submodules. 
16 |         uses: actions/checkout@v4
17 | 
18 |       - name: Set up Python
19 |         uses: actions/setup-python@v4
20 |         with:
21 |           python-version: '3.8'
22 | 
23 |       - name: Install build dependencies & build
24 |         run: |
25 |           python -m pip install --upgrade pip
26 |           pip install setuptools wheel
27 |           python setup.py sdist bdist_wheel
28 | 
29 |       - name: Publish dev/rc build to test.pypi.org
30 |         uses: pypa/gh-action-pypi-publish@v1.3.1
31 |         with:
32 |           user: __token__
33 |           password: ${{ secrets.PYPI_TEST }}
34 |           repository_url: https://test.pypi.org/legacy/
35 | 
--------------------------------------------------------------------------------
/async_rediscache/redis_scripts/rediscache_increment.lua:
--------------------------------------------------------------------------------
1 | local value = redis.call('HGET', KEYS[1], KEYS[2]);
2 | if not value then value = "i|0" end
3 | 
4 | local get_prefix = function (redis_value)
5 |     local prefix_end = redis_value:find("|")
6 |     return prefix_end, redis_value:sub(1, prefix_end-1)
7 | end
8 | 
9 | local value_prefix_end, value_prefix = get_prefix(value)
10 | if not value_prefix_end then
11 |     return string.format("ValueError|received malformed value from keys %s %s: `%s`", KEYS[1], KEYS[2], value)
12 | end
13 | 
14 | local increment = ARGV[1]
15 | local incr_prefix_end, incr_prefix = get_prefix(increment)
16 | if not incr_prefix_end then
17 |     return string.format("ValueError|received malformed increment value: `%s`", increment)
18 | end
19 | 
20 | local valid_prefixes = "if"
21 | local valid_values = valid_prefixes:match(value_prefix) and valid_prefixes:match(incr_prefix)
22 | if not valid_values then
23 |     return string.format("TypeError|cannot increment value `%s` with `%s`.", value, ARGV[1])
24 | end
25 | 
26 | local new_value = value:sub(value_prefix_end+1) + increment:sub(incr_prefix_end+1)
27 | local result
28 | if incr_prefix..value_prefix == "ii" then
29 |     result = string.format("i|%d", new_value)
30 | else
31 |     result = string.format("f|%s", tostring(new_value))
32 | end
33 | 
34 | redis.call("HSET", KEYS[1], KEYS[2], result)
35 | return result
36 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """
2 | This file is used to install this package via the pip tool.
3 | 
4 | It keeps track of versioning, as well as dependencies and
5 | what versions of Python we support.
6 | """
7 | from setuptools import find_packages, setup
8 | 
9 | 
10 | setup(
11 |     name="async-rediscache",
12 |     version="1.0.0rc3",
13 |     description="An easy to use asynchronous Redis cache",
14 |     long_description=open("README.md").read(),
15 |     long_description_content_type="text/markdown",
16 |     author="Python Discord",
17 |     author_email="staff@pythondiscord.com",
18 |     url="https://github.com/python-discord/async-rediscache",
19 |     license="MIT",
20 |     packages=find_packages(exclude=["tests", "tests.*"]),
21 |     package_data={"async_rediscache": ["py.typed"]},
22 |     classifiers=[
23 |         "License :: OSI Approved :: MIT License",
24 |         "Development Status :: 4 - Beta",
25 |         "Operating System :: OS Independent",
26 |         "Programming Language :: Python",
27 |         "Programming Language :: Python :: 3.8",
28 |         "Programming Language :: Python :: 3.9",
29 |         "Programming Language :: Python :: 3.10",
30 |         "Programming Language :: Python :: 3.11",
31 |         "Framework :: AsyncIO",
32 |         "Topic :: Database",
33 |         "Topic :: Software Development :: Libraries :: Python Modules",
34 |     ],
35 |     install_requires=[
36 |         "redis~=5.0"
37 |     ],
38 |     python_requires='~=3.8',
39 |     extras_require={
40 |         "fakeredis": ["fakeredis[lua]~=2.0"],
41 |     },
42 |     include_package_data=True,
43 |     zip_safe=False
44 | )
45 | 
--------------------------------------------------------------------------------
/tests/test_session.py:
--------------------------------------------------------------------------------
1 | import unittest.mock
2 | 
3 | from async_rediscache import session
4 | 
5 | 
6 | class RedisSessionTests(unittest.IsolatedAsyncioTestCase):
7 |     """Tests for the RedisSession wrapper class."""
8 | 
9 |     def setUp(self) -> None:
10 |         """Explicitly reset the `RedisSession` singleton before each test."""
11 |         session.RedisSession._instance = None
12 | 
13 |     async def test_singleton(self):
14 |         """Test that the same session is returned from multiple constructions."""
15 |         first = session.RedisSession(use_fakeredis=True)
16 |         second = session.RedisSession(use_fakeredis=True)
17 |         self.assertIs(first, second, "Only one RedisSession should exist at runtime.")
18 |         self.assertIs(
19 |             first,
20 |             session.RedisSession.get_current_session(),
21 |             "The session returned by get_current_session does not match the initial session."
22 |         )
23 | 
24 |     async def test_get_session_checks_initialized(self):
25 |         """Ensure an error is raised if the session is accessed before it's been initialized."""
26 |         with self.assertRaises(session.RedisSessionNotInitialized):
27 |             session.RedisSession.get_current_session()
28 | 
29 |     async def test_error_if_not_connected(self):
30 |         """Test that no operations can be performed until the connect method is called."""
31 |         with self.assertRaises(session.RedisSessionNotConnected):
32 |             _ = session.RedisSession().client
33 | 
34 |     async def test_pool_deprecation_warning(self):
35 |         """Test that accessing the pool through the session outputs a warning."""
36 |         redis_session = await session.RedisSession(use_fakeredis=True).connect()
37 |         with self.assertWarns(DeprecationWarning):
38 |             _ = redis_session.pool
39 | 
40 |     @staticmethod
41 |     async def test_no_connect_operations():
42 |         """Test that no operations are performed on connect if ping is False."""
43 |         # We use a purposefully invalid real connection, which will fail
44 |         # if any operations are attempted.
45 |         await session.RedisSession(host="invalid").connect(ping=False)
--------------------------------------------------------------------------------
/.github/workflows/lint_test.yaml:
--------------------------------------------------------------------------------
1 | name: Lint & Test
2 | on:
3 |   push:
4 |     branches:
5 |       - main
6 |   pull_request:
7 | 
8 | jobs:
9 |   lint_test:
10 |     runs-on: ubuntu-latest
11 | 
12 |     env:
13 |       PIP_NO_CACHE_DIR: false
14 |       PIP_USER: 1  # Make installed packages go to the custom PYTHONUSERBASE for caching.
15 |       PIPENV_HIDE_EMOJIS: 1
16 |       PIPENV_IGNORE_VIRTUALENVS: 1
17 |       PIPENV_NOSPIN: 1
18 |       PYTHONUSERBASE: ${{ github.workspace }}/.cache/py-user-base
19 | 
20 |     steps:
21 |       - uses: actions/checkout@v4
22 | 
23 |       - name: Set up Python
24 |         id: python
25 |         uses: actions/setup-python@v4
26 |         with:
27 |           python-version: '3.8'
28 | 
29 |       - name: Restore Python environment
30 |         id: pycache
31 |         uses: actions/cache@v3
32 |         with:
33 |           path: ${{ env.PYTHONUSERBASE }}
34 |           key: "${{ runner.os }}-\
35 |             python-\
36 |             ${{ steps.python.outputs.python-version }}-\
37 |             ${{ hashFiles('Pipfile') }}-\
38 |             ${{ hashFiles('Pipfile.lock') }}"
39 |           # Don't restore on a different (older) Python version cause I'm unsure if it's safe.
40 |           restore-keys: "${{ runner.os }}-\
41 |             python-\
42 |             ${{ steps.python.outputs.python-version }}-\
43 |             ${{ hashFiles('Pipfile') }}"
44 | 
45 |       - name: Prepend PATH
46 |         run: echo '${{ env.PYTHONUSERBASE }}/bin' >> $GITHUB_PATH
47 | 
48 |       - name: Install pipenv
49 |         run: pip install pipenv
50 |         if: ${{ success() && steps.pycache.outputs.cache-hit != 'true' }}
51 | 
52 |       - name: Install dependencies using pipenv
53 |         run: pipenv install --dev --deploy --system
54 |         if: ${{ success() && steps.pycache.outputs.cache-hit != 'true' }}
55 | 
56 |       - name: Run flake8
57 |         run: python -m flake8
58 | 
59 |       - name: Run unittest
60 |         run: |
61 |           python -m coverage run -m unittest
62 |           python -m coverage report -m
63 | 
64 |       # This step will publish the coverage reports to coveralls.io and
65 |       # print a "job" link in the output of the GitHub Action
66 |       - name: Publish coverage report to coveralls.io
67 |         env:
68 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
69 |         run: python -m coveralls
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [![Coverage Status](https://coveralls.io/repos/github/SebastiaanZ/async-rediscache/badge.svg?branch=main)](https://coveralls.io/github/SebastiaanZ/async-rediscache?branch=main)
2 | [![Lint & Test](https://github.com/SebastiaanZ/async-rediscache/actions/workflows/lint_test.yaml/badge.svg)](https://github.com/SebastiaanZ/async-rediscache/actions/workflows/lint_test.yaml)
3 | [![Release to PyPI](https://github.com/SebastiaanZ/async-rediscache/actions/workflows/release.yaml/badge.svg)](https://github.com/SebastiaanZ/async-rediscache/actions/workflows/release.yaml)
4 | 
5 | # Asynchronous Redis Cache
6 | This package offers several data types to ease working with a Redis cache in an asynchronous workflow. The package is still in development, and it is not yet recommended for production use.
7 | 
8 | ## Installation
9 | 
10 | ### Prerequisites
11 | 
12 | To use `async-rediscache`, make sure that [`redis`](https://redis.io/download) is installed and running on your system. Alternatively, you can use `fakeredis` as a back-end for testing purposes and local development.
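For example, `RedisSession` accepts a `use_fakeredis` flag (see `session.py` further down in this document) that swaps the real client for `fakeredis.aioredis.FakeRedis`, so code can run without a server; a quick sketch:

```python
import asyncio

import async_rediscache


async def main():
    # No Redis server required: the session is backed by fakeredis.
    session = async_rediscache.RedisSession(use_fakeredis=True)
    await session.connect()

    cache = async_rediscache.RedisCache(namespace="demo")
    await cache.set("answer", 42)
    print(await cache.get("answer"))  # 42


asyncio.run(main())
```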
13 | 
14 | ### Install using `pip`
15 | 
16 | To install `async-rediscache` run the following command:
17 | 
18 | ```bash
19 | pip install async-rediscache
20 | ```
21 | 
22 | Alternatively, to install `async-rediscache` with `fakeredis` run:
23 | 
24 | ```bash
25 | pip install async-rediscache[fakeredis]
26 | ```
27 | 
28 | ## Basic use
29 | 
30 | ### Creating a `RedisSession`
31 | To use a `RedisCache`, you first have to create a `RedisSession` instance that manages the connection to Redis. You can create the `RedisSession` at any point, but make sure to call the `connect` method from an asynchronous context (see [this explanation](https://docs.aiohttp.org/en/stable/faq.html#why-is-creating-a-clientsession-outside-of-an-event-loop-dangerous) for why).
32 | 
33 | ```python
34 | import async_rediscache
35 | 
36 | async def main():
37 |     session = async_rediscache.RedisSession(host="localhost")
38 |     await session.connect()
39 | 
40 |     # Do something interesting
41 | ```
42 | 
43 | ### Creating a `RedisSession` with a network connection
44 | 
45 | ```python
46 | import async_rediscache
47 | async def main():
48 |     connection = {"host": "127.0.0.1", "port": 6379}
49 |     await async_rediscache.RedisSession(**connection).connect()
50 | ```
51 | ### `RedisCache`
52 | 
53 | A `RedisCache` is the most basic data type provided by `async-rediscache`. It works like a dictionary in that you can associate keys with values. To prevent key collisions, each `RedisCache` instance should use a unique `namespace` identifier that will be prepended to the key when storing the pair to Redis.
54 | 
55 | #### Creating a `RedisCache` instance
56 | 
57 | When creating a `RedisCache` instance, it's important to make sure that it has a unique `namespace`. This can be done directly by passing a `namespace` keyword argument to the constructor:
58 | 
59 | ```python
60 | import async_rediscache
61 | 
62 | birthday_cache = async_rediscache.RedisCache(namespace="birthday")
63 | ```
64 | 
65 | Alternatively, if you assign a `RedisCache` instance to a class attribute, a namespace will be automatically generated from the name of the owner class and the name of the attribute the cache is assigned to:
66 | 
67 | ```python
68 | import async_rediscache
69 | 
70 | class Channel:
71 |     topics = async_rediscache.RedisCache()  # The namespace will be set to `"Channel.topics"`
72 | ```
73 | 
74 | Note: There is nothing preventing you from reusing the same namespace, although you should be aware that this could lead to key collisions (i.e., one cache could interfere with the values another cache has stored).
75 | 
76 | #### Using a `RedisCache` instance
77 | 
78 | Using a `RedisCache` is straightforward: just call and await the methods you want to use and it should work. There's no need to pass a `RedisSession` around, as the session is fetched internally by the `RedisCache`. One restriction is that the `RedisSession` must still be open and connected whenever you use a `RedisCache`.
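For instance, using a cache before the session has connected raises `RedisSessionNotConnected` — a small sketch (`use_fakeredis=True` is only used here so the snippet runs without a server):

```python
import asyncio

import async_rediscache


async def main():
    session = async_rediscache.RedisSession(use_fakeredis=True)
    cache = async_rediscache.RedisCache(namespace="demo")

    try:
        await cache.get("key")  # connect() has not been awaited yet
    except async_rediscache.RedisSessionNotConnected:
        print("await session.connect() before using the cache")

    await session.connect()
    await cache.set("key", "value")  # the same operation now succeeds


asyncio.run(main())
```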
79 | 
80 | Here are some usage examples:
81 | 
82 | ```python
83 | import async_rediscache
84 | 
85 | async def main():
86 |     session = async_rediscache.RedisSession(host="localhost")
87 |     await session.connect()
88 | 
89 |     cache = async_rediscache.RedisCache(namespace="python")
90 | 
91 |     # Simple key/value manipulation
92 |     await cache.set("Guido", "van Rossum")
93 |     print(await cache.get("Guido"))  # Would print `van Rossum`
94 | 
95 |     # A contains check works as well
96 |     print(await cache.contains("Guido"))  # True
97 |     print(await cache.contains("Kyle"))  # False
98 | 
99 |     # You can iterate over all key, value pairs as well:
100 |     item_view = await cache.items()
101 |     for key, value in item_view:
102 |         print(key, value)
103 | 
104 |     # Other options:
105 |     number_of_pairs = await cache.length()
106 |     pairs_in_dict = await cache.to_dict()
107 |     popped_item = await cache.pop("Raymond", "Default value")
108 |     await cache.update({"Brett": 10, "Barry": False})
109 |     await cache.delete("Barry")
110 |     await cache.increment("Brett", 1)  # Increment Brett's int by 1
111 |     await cache.clear()
112 | ```
113 | 
114 | ### `RedisQueue`
115 | 
116 | A `RedisQueue` implements the same interface as a `queue.SimpleQueue` object, except that all the methods are coroutines. Creating an instance works the same as with a `RedisCache`.
117 | 
--------------------------------------------------------------------------------
/async_rediscache/session.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import logging
4 | import warnings
5 | 
6 | import redis.asyncio
7 | 
8 | __all__ = ['RedisSession', 'RedisSessionNotInitialized', 'RedisSessionNotConnected']
9 | 
10 | log = logging.getLogger(__name__)
11 | 
12 | 
13 | class RedisSessionNotInitialized(RuntimeError):
14 |     """Raised when the RedisSession instance has not been initialized yet."""
15 | 
16 | 
17 | class RedisSessionNotConnected(RuntimeError):
18 |     """Raised when trying to access the Redis client before `connect` has been called."""
19 | 
20 | 
21 | class FakeRedisNotInstalled(ImportError):
22 |     """Exception raised when trying to use `fakeredis` while it's not installed."""
23 | 
24 | 
25 | class RedisSingleton(type):
26 |     """
27 |     Ensure that only a single RedisSession instance exists at a time.
28 | 
29 |     The metaclass first checks if an instance currently exists and, if so, it
30 |     returns the existing instance. If an instance does not exist, it will create
31 |     and return a new instance of the class.
32 |     """
33 | 
34 |     def __init__(cls, *args) -> None:
35 |         super().__init__(*args)
36 |         cls._instance = None
37 | 
38 |     def __call__(cls, *args, **kwargs) -> RedisSession:
39 |         """Return the singleton RedisSession instance."""
40 |         if not cls._instance:
41 |             cls._instance = super().__call__(*args, **kwargs)
42 | 
43 |         return cls._instance
44 | 
45 | 
46 | class RedisSession(metaclass=RedisSingleton):
47 |     """
48 |     A RedisSession that manages the lifetime of a Redis client instance.
49 | 
50 |     To avoid making the client access more complicated than it needs to be for
51 |     a client that should only be created once during the application's runtime, the
52 |     `RedisSession` instance should be created and its `connect` method should be
53 |     awaited before the tasks that rely on the Redis client get scheduled.
54 | 
55 |     If a third-party library interacts with the event loop, obtain the loop
56 |     before creating that library, and run the loop until the `connect` coroutine
57 |     has completed. Then pass the loop to the library.
58 | 
59 |     Example:
60 |         redis_session = RedisSession(host="localhost", port=6379)
61 |         loop = asyncio.get_event_loop()
62 |         loop.run_until_complete(redis_session.connect())
63 |         bot = discord.ext.commands.Bot(..., loop=loop)
64 |     """
65 | 
66 |     _instance: RedisSession = None
67 | 
68 |     def __init__(
69 |             self, *, global_namespace: str = "", use_fakeredis: bool = False, **session_kwargs
70 |     ) -> None:
71 |         self.global_namespace = global_namespace
72 | 
73 |         self._client = None
74 |         self._session_kwargs = session_kwargs
75 |         self._use_fakeredis = use_fakeredis
76 |         self.valid = False
77 | 
78 |     @classmethod
79 |     def get_current_session(cls) -> RedisSession:
80 |         """
81 |         Get the currently configured RedisSession instance.
82 | 
83 |         If an instance has not been created by this point, this method will
84 |         raise a `RedisSessionNotInitialized` exception.
85 |         """
86 |         if not cls._instance:
87 |             raise RedisSessionNotInitialized("the redis session has not been initialized yet.")
88 | 
89 |         return cls._instance
90 | 
91 |     @property
92 |     def client(self) -> redis.asyncio.Redis:
93 |         """
94 |         Get the Redis client after it has been initialized.
95 | 
96 |         This property will raise a `RedisSessionNotConnected` if it is accessed
97 |         before the connect method is called.
98 |         """
99 |         if not self.valid:
100 |             raise RedisSessionNotConnected(
101 |                 "attempting to access the client before the connection has been created."
102 |             )
103 |         return self._client
104 | 
105 |     @property
106 |     def pool(self) -> redis.asyncio.ConnectionPool:
107 |         """
108 |         Get the connection pool after checking if it is still connected.
109 | 
110 |         This property is deprecated. Most operations should be performed on
111 |         the client directly. Operations which benefit from managing the pool directly
112 |         can access it from `client.connection_pool`.
113 | 
114 |         This property will raise a `RedisSessionNotConnected` if it's accessed
115 |         before the connect method is called.
116 |         """
117 |         # The validation and error logic is handled by the client property
118 |         warnings.warn(
119 |             DeprecationWarning(
120 |                 "pool property is deprecated. Most operations should be performed on "
121 |                 "the client directly. Operations which benefit from managing the pool directly "
122 |                 "can access it from client.connection_pool.",
123 |             ),
124 |             stacklevel=2,
125 |         )
126 |         return self.client.connection_pool
127 | 
128 |     async def connect(self, *, ping: bool = True) -> RedisSession:
129 |         """
130 |         Connect to Redis by instantiating the Redis client.
131 | 
132 |         If ping is True, a PING will be performed to ensure the connection is valid.
133 |         If it's False, it'll be assumed the session is valid, and it's up to the user to
134 |         manage when it's not.
135 |         """
136 |         log.debug("Creating Redis client.")
137 | 
138 |         # Decide if we want to use `fakeredis` or an actual Redis server. The
139 |         # option to use `fakeredis.aioredis` is provided to aid with running the
140 |         # application in a development environment and to aid with writing
141 |         # unittests.
142 |         if self._use_fakeredis:
143 |             # Only import fakeredis when required. This ensures that it's not a
144 |             # required dependency if someone's not planning on using it.
145 |             try:
146 |                 import fakeredis.aioredis
147 |             except ImportError:  # pragma: no cover
148 |                 raise FakeRedisNotInstalled(
149 |                     "RedisSession was configured to use `fakeredis`, but it is not installed. "
150 |                     "Either install `fakeredis` manually or install `async-rediscache` using "
151 |                     "`pip install async-rediscache[fakeredis]` to enable support."
152 |                 )
153 | 
154 |             kwargs = dict(self._session_kwargs)
155 |             # The following kwargs are not supported by fakeredis
156 |             [kwargs.pop(kwarg, None) for kwarg in (
157 |                 "address", "username", "password", "port", "timeout"
158 |             )]
159 |             self._client = fakeredis.aioredis.FakeRedis(**kwargs)
160 |         else:
161 |             self._client = redis.asyncio.Redis(**self._session_kwargs)
162 | 
163 |         if ping:
164 |             # Perform a PING to confirm the connection is valid
165 |             await self._client.ping()
166 | 
167 |         self.valid = True
168 |         return self
169 | 
--------------------------------------------------------------------------------
/async_rediscache/types/cache.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Dict, ItemsView, Optional
3 | 
4 | from .base import RedisKeyType, RedisObject, RedisValueType
5 | 
6 | __all__ = [
7 |     "RedisCache",
8 | ]
9 | 
10 | log = logging.getLogger(__name__)
11 | 
12 | 
13 | class RedisCache(RedisObject):
14 |     """
15 |     A simplified interface for a Redis hash set.
16 | 
17 |     We implement several convenience methods that are fairly similar to how a
18 |     dict behaves, and they should be familiar to Python users. The biggest
19 |     difference is that all the public methods in this class are coroutines, and
20 |     must be awaited.
21 | 
22 |     Because of limitations in Redis, this cache will only accept strings and
23 |     integers as keys and strings, integers, floats, and bools as values.
24 | 
25 |     By default, the namespace key of a RedisCache is automatically determined
26 |     by the name of the owner class and the class attribute assigned to the
27 |     RedisCache instance. To bind a RedisCache to a specific namespace, pass the
28 |     namespace as the `namespace` keyword argument to the constructor.
29 | 
30 |     Please note that for automatic namespacing, this class MUST be created as a
31 |     class attribute to properly initialize the instance's namespace. See
32 |     `__set_name__` for more information about how this works.
33 | 
34 |     Simple example for how to use this:
35 | 
36 |     class SomeCog(Cog):
37 |         # To initialize a valid RedisCache, just add it as a class attribute
38 |         # here. Do not add it to the __init__ method or anywhere else, it MUST
39 |         # be a class attribute. Do not pass any parameters.
40 |         cache = RedisCache()
41 | 
42 |         async def my_method(self):
43 | 
44 |             # Now we're ready to use the RedisCache.
45 |             #
46 |             # We can store some stuff in the cache just by doing this.
47 |             # This data will persist through restarts!
48 |             await self.cache.set("key", "value")
49 | 
50 |             # To get the data, simply do this.
51 |             value = await self.cache.get("key")
52 | 
53 |             # Other methods work more or less like a dictionary.
54 |             # Checking if something is in the cache
55 |             await self.cache.contains("key")
56 | 
57 |             # iterating the cache
58 |             for key, value in await self.cache.items():
59 |                 print(value)
60 | 
61 |             # We can even iterate in a comprehension!
62 | consumed = [value for key, value in await self.cache.items()] 63 | 64 | """ 65 | 66 | def __init__(self, *args, **kwargs) -> None: 67 | """Initialize the RedisCache.""" 68 | super().__init__(*args, **kwargs) 69 | 70 | async def set(self, key: RedisKeyType, value: RedisValueType) -> None: 71 | """Store an item in the Redis cache.""" 72 | # Convert to a typestring and then set it 73 | key = self._key_to_typestring(key) 74 | value = self._value_to_typestring(value) 75 | 76 | log.debug(f"Setting {key} to {value}.") 77 | await self.redis_session.client.hset(self.namespace, key, value) 78 | 79 | async def get( 80 | self, key: RedisKeyType, default: Optional[RedisValueType] = None 81 | ) -> Optional[RedisValueType]: 82 | """Get an item from the Redis cache.""" 83 | key = self._key_to_typestring(key) 84 | 85 | log.debug(f"Attempting to retrieve {key}.") 86 | value = await self.redis_session.client.hget(self.namespace, key) 87 | return self._maybe_value_from_typestring(value, default) 88 | 89 | async def delete(self, key: RedisKeyType) -> int: 90 | """ 91 | Delete an item from the Redis cache. 92 | 93 | If we try to delete a key that does not exist, it will simply be ignored. 94 | Returns the number of deleted keys. 95 | 96 | See https://redis.io/commands/hdel for more info on how this works. 97 | """ 98 | key = self._key_to_typestring(key) 99 | 100 | log.debug(f"Attempting to delete {key}.") 101 | return await self.redis_session.client.hdel(self.namespace, key) 102 | 103 | async def contains(self, key: RedisKeyType) -> bool: 104 | """ 105 | Check if a key exists in the Redis cache. 106 | 107 | Return True if the key exists, otherwise False. 108 | """ 109 | key = self._key_to_typestring(key) 110 | exists = await self.redis_session.client.hexists(self.namespace, key) 111 | 112 | log.debug(f"Testing if {key} exists in the RedisCache - Result is {exists}") 113 | return exists 114 | 115 | async def items(self) -> ItemsView: 116 | """ 117 | Fetch all the key/value pairs in the cache. 118 | 119 | Returns a normal ItemsView, like you would get from dict.items(). 120 | 121 | Keep in mind that these items are just a _copy_ of the data in the 122 | RedisCache - any changes you make to them will not be reflected 123 | into the RedisCache itself. If you want to change these, you need 124 | to make a .set call. 125 | 126 | Example: 127 | items = await my_cache.items() 128 | for key, value in items: 129 | # Iterate like a normal dictionary 130 | """ 131 | items = await self.redis_session.client.hgetall(self.namespace) 132 | items = self._dict_from_typestring(items).items() 133 | log.debug(f"Retrieving all key/value pairs from cache, total of {len(items)} items.") 134 | return items 135 | 136 | async def length(self) -> int: 137 | """Return the number of items in the Redis cache.""" 138 | number_of_items = await self.redis_session.client.hlen(self.namespace) 139 | log.debug(f"Returning length. 
Result is {number_of_items}.") 140 | return number_of_items 141 | 142 | async def to_dict(self) -> Dict: 143 | """Convert to dict and return.""" 144 | return {key: value for key, value in await self.items()} 145 | 146 | async def clear(self) -> None: 147 | """Deletes the entire hash from the Redis cache.""" 148 | log.debug("Clearing the cache of all key/value pairs.") 149 | await self.redis_session.client.delete(self.namespace) 150 | 151 | async def pop( 152 | self, key: RedisKeyType, default: Optional[RedisValueType] = None 153 | ) -> RedisValueType: 154 | """Get the item, remove it from the cache, and provide a default if not found.""" 155 | pop_script = await self._load_script("rediscache_pop.lua") 156 | key = self._key_to_typestring(key) 157 | 158 | log.debug(f"Popping {key!r} from the cache.") 159 | value = await self.redis_session.client.evalsha(pop_script, 2, self.namespace, key) 160 | 161 | return self._maybe_value_from_typestring(value, default) 162 | 163 | async def update(self, items: Dict[RedisKeyType, RedisValueType]) -> None: 164 | """ 165 | Update the Redis cache with multiple values. 166 | 167 | This works exactly like dict.update from a normal dictionary. You pass 168 | a dictionary with one or more key/value pairs into this method. If the 169 | keys do not exist in the RedisCache, they are created. If they do exist, 170 | the values are updated with the new ones from `items`. 171 | 172 | Please note that keys and the values in the `items` dictionary 173 | must consist of valid RedisKeyTypes and RedisValueTypes. 174 | """ 175 | log.debug(f"Updating the cache with the following items:\n{items}") 176 | await self.redis_session.client.hset( 177 | self.namespace, 178 | mapping=self._dict_to_typestring(items) 179 | ) 180 | 181 | async def increment(self, key: RedisKeyType, amount: Optional[float] = 1) -> float: 182 | """ 183 | Increment the value by `amount`. 184 | 185 | This works for both floats and ints, but will raise a TypeError 186 | if you try to do it for any other type of value. 187 | 188 | This also supports negative amounts, although it would provide better 189 | readability to use .decrement() for that. 190 | """ 191 | log.debug(f"Attempting to increment/decrement the value with the key {key} by {amount}.") 192 | 193 | if type(amount) not in (int, float): 194 | raise TypeError("the increment amount must be an `int` or `float`.") 195 | 196 | increment_script = await self._load_script("rediscache_increment.lua") 197 | 198 | keys = [self.namespace, self._key_to_typestring(key)] 199 | args = [self._value_to_typestring(amount)] 200 | value = await self.redis_session.client.evalsha(increment_script, len(keys), *keys, *args) 201 | 202 | return self._maybe_value_from_typestring(value) 203 | 204 | async def decrement(self, key: RedisKeyType, amount: Optional[float] = 1) -> float: 205 | """ 206 | Decrement the value by `amount`. 207 | 208 | Basically just does the opposite of .increment. 
209 |         """
210 |         return await self.increment(key, -amount)
211 | 
--------------------------------------------------------------------------------
/tests/types/test_base.py:
--------------------------------------------------------------------------------
1 | import unittest.mock
2 | 
3 | from async_rediscache import types
4 | 
5 | 
6 | @unittest.mock.patch("async_rediscache.types.base.RedisSession")
7 | class RedisObjectTests(unittest.IsolatedAsyncioTestCase):
8 |     """Tests for the base RedisObject class."""
9 | 
10 |     def test_explicit_namespace_without_global_namespace(self, mock_session):
11 |         """Test explicitly set namespace without a global namespace."""
12 |         local_namespace = "van Rossum"
13 | 
14 |         mock_session.get_current_session().global_namespace = ""
15 |         redis_object = types.RedisObject(namespace=local_namespace)
16 |         self.assertEqual(redis_object.namespace, local_namespace)
17 | 
18 |     def test_explicit_namespace_with_global_namespace(self, mock_session):
19 |         """Test explicitly set namespace with a global namespace."""
20 |         global_namespace = "Guido"
21 |         local_namespace = "van Rossum"
22 |         expected_namespace = f"{global_namespace}.{local_namespace}"
23 | 
24 |         mock_session.get_current_session().global_namespace = global_namespace
25 |         redis_object = types.RedisObject(namespace=local_namespace)
26 |         self.assertEqual(redis_object.namespace, expected_namespace)
27 | 
28 |     def test_descriptor_based_namespace_without_global_namespace(self, mock_session):
29 |         """Test namespace set by __set_name__ without a global namespace."""
30 |         class Guido:
31 |             van_rossum = types.RedisObject()
32 | 
33 |         mock_session.get_current_session().global_namespace = ""
34 |         self.assertEqual(Guido.van_rossum.namespace, "Guido.van_rossum")
35 | 
36 |     def test_descriptor_based_namespace_with_global_namespace(self, mock_session):
37 |         """Test namespace set by __set_name__ with a global namespace."""
38 |         class Guido:
39 |             van_rossum = types.RedisObject()
40 | 
41 |         mock_session.get_current_session().global_namespace = "core_dev"
42 |         self.assertEqual(Guido.van_rossum.namespace, "core_dev.Guido.van_rossum")
43 | 
44 |     def test_set_name_picks_first_attribute_for_namespace(self, mock_session):
45 |         """Test that __set_name__ only sets the namespace for the first assignment."""
46 |         class Kyle:
47 |             stanley = types.RedisObject()
48 |             broflovski = stanley
49 | 
50 |         mock_session.get_current_session().global_namespace = ""
51 |         self.assertEqual(Kyle.stanley.namespace, "Kyle.stanley")
52 | 
53 |     def test_set_name_does_not_override_explicit_namespace(self, mock_session):
54 |         """Test that __set_name__ does not override an explicitly set namespace."""
55 |         class Kyle:
56 |             stanley = types.RedisObject(namespace="python")
57 | 
58 |         mock_session.get_current_session().global_namespace = ""
59 |         self.assertEqual(Kyle.stanley.namespace, "python")
60 | 
61 |     async def test_get_client_raises_without_set_namespace(self, _mock_session):
62 |         """Test that accessing the `redis_session` raises an exception if a namespace wasn't set."""
63 |         with self.assertRaises(types.NoNamespaceError):
64 |             _ = types.RedisObject().redis_session
65 | 
66 |     def test_bypassing_global_namespace(self, mock_session):
67 |         """Test if a RedisObject allows you to bypass the global namespace."""
68 |         cache = types.RedisObject(namespace="Amsterdam", use_global_namespace=False)
69 | 
70 |         mock_session.get_current_session().global_namespace = "New"
71 |         self.assertEqual(cache.namespace, "Amsterdam")
72 | 
73 |     async def test_redis_session_returns_correct_session(self,
mock_session): 74 | """Test if accessing the `redis_session` property returns the correct session.""" 75 | cache = types.RedisObject(namespace="test") 76 | mock_session.get_current_session.return_value = mock_session 77 | self.assertEqual(cache.redis_session, mock_session) 78 | 79 | def test_redis_session_property_gets_current_session(self, mock_session): 80 | """Test if the .redis_session property gets the current RedisSession.""" 81 | cache = types.RedisObject(namespace="test") 82 | mock_session.get_current_session.return_value = unittest.mock.sentinel.redis_session 83 | self.assertEqual(cache.redis_session, unittest.mock.sentinel.redis_session) 84 | 85 | def test_typestring_conversion(self, _mock_session): 86 | """Test the typestring-related helper functions.""" 87 | conversion_tests = ( 88 | (12, "i|12"), 89 | (12.4, "f|12.4"), 90 | ("cowabunga", "s|cowabunga"), 91 | ) 92 | 93 | cache = types.RedisObject(namespace="test") 94 | 95 | # Test conversion to typestring 96 | for _input, expected in conversion_tests: 97 | self.assertEqual(cache._value_to_typestring(_input), expected) 98 | 99 | # Test conversion from typestrings 100 | for _input, expected in conversion_tests: 101 | self.assertEqual(cache._value_from_typestring(expected), _input) 102 | 103 | # Test that exceptions are raised on invalid input 104 | with self.assertRaises(TypeError): 105 | cache._value_to_typestring(["internet"]) 106 | 107 | with self.assertRaises(TypeError): 108 | cache._value_from_typestring("o|firedog") 109 | 110 | def test_dict_to_from_typestring(self, _mock_session): 111 | """Test if ._dict_to_typestring creates a properly typed dict.""" 112 | cache = types.RedisObject(namespace="test") 113 | 114 | original = { 115 | "a": 1.1, 116 | "b": 200, 117 | 3: "string value", 118 | 4: True, 119 | } 120 | typestring_dictionary = { 121 | "s|a": "f|1.1", 122 | "s|b": "i|200", 123 | "i|3": "s|string value", 124 | "i|4": "b|1", 125 | } 126 | 127 | with self.subTest(method="_dict_to_typestring"): 128 | self.assertEqual(cache._dict_to_typestring(original), typestring_dictionary) 129 | 130 | with self.subTest(method="_dict_from_typestring"): 131 | self.assertEqual(cache._dict_from_typestring(typestring_dictionary), original) 132 | 133 | @unittest.mock.patch.object(types.base.asyncio, "Lock") 134 | async def test_atomic_transaction(self, mock_asyncio_lock, _mock_session): 135 | cache = types.RedisObject(namespace="test") 136 | 137 | # Set up proper mocking of `asyncio.Lock` 138 | async_lock_context_manager = unittest.mock.AsyncMock() 139 | mock_asyncio_lock.return_value = async_lock_context_manager 140 | 141 | # Create a mock method with a sentinel return value and decorate it 142 | mock_method = unittest.mock.AsyncMock(return_value=unittest.mock.sentinel.method_return) 143 | mock_method.__qualname__ = "mock_method" 144 | decorated_method = cache.atomic_transaction(mock_method) 145 | 146 | # Get the result by running the decorated method 147 | result = await decorated_method("one", kwarg="two") 148 | 149 | # Assert that the original method was awaited with the proper arguments 150 | mock_method.assert_awaited_once_with("one", kwarg="two") 151 | 152 | # Assert that the return value is properly propagated 153 | self.assertEqual(result, unittest.mock.sentinel.method_return) 154 | 155 | # Run decorated method another time to check that we only create lock once 156 | await decorated_method("one", kwarg="two") 157 | 158 | # Assert lock creation and acquisition 159 | mock_asyncio_lock.assert_called_once_with() 160 | 161 | # Assert 
that we've acquired and released the lock twice
162 |         self.assertEqual(async_lock_context_manager.__aenter__.await_count, 2)
163 |         self.assertEqual(async_lock_context_manager.__aexit__.await_count, 2)
164 | 
165 |     def test_maybe_value(self, _):
166 |         """Test the deserialization of a valid `maybe_value`."""
167 |         cache = types.RedisObject(namespace="test")
168 |         value_string = "Hello!"
169 |         decoded = cache._maybe_value_from_typestring(cache._value_to_typestring(value_string))
170 |         self.assertEqual(value_string, decoded)
171 | 
172 |     def test_maybe_encoded_value(self, _):
173 |         """Test the deserialization of a valid bytes `maybe_value`."""
174 |         cache = types.RedisObject(namespace="test")
175 |         value_string = "Hello!"
176 |         value = cache._value_to_typestring("Hello!").encode("utf-8")
177 |         decoded = cache._maybe_value_from_typestring(value)
178 |         self.assertEqual(value_string, decoded)
179 | 
180 |     def test_maybe_error_value(self, _):
181 |         """Test that an exception is raised when the value encodes an exception."""
182 |         cache = types.RedisObject(namespace="test")
183 |         exception_message = "This is a sample exception."
184 |         with self.assertRaises(ValueError) as exception:
185 |             cache._maybe_value_from_typestring(f"ValueError|{exception_message}")
186 |         self.assertEqual(
187 |             exception_message,
188 |             exception.exception.args[0],
189 |             "Function did not correctly read the exception message."
190 |         )
191 | 
192 |     def test_maybe_empty_value(self, _):
193 |         """Test that the correct value is returned when deserializing a None value."""
194 |         cache = types.RedisObject(namespace="test")
195 |         for default in (None, "default value"):
196 |             with self.subTest(default=default):
197 |                 value = cache._maybe_value_from_typestring(None, default=default)
198 |                 self.assertEqual(default, value)
199 | 
--------------------------------------------------------------------------------
/tests/types/test_rediscache.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | 
3 | import time_machine
4 | 
5 | from async_rediscache import types
6 | from .helpers import BaseRedisObjectTests
7 | 
8 | 
9 | class RedisCacheTests(BaseRedisObjectTests):
10 |     """Tests for the RedisCache data type."""
11 | 
12 |     async def asyncSetUp(self):
13 |         await super().asyncSetUp()
14 |         self.cache = types.RedisCache(namespace="test_cache")
15 | 
16 |     async def test_set_get_item(self):
17 |         """Test that users can set and get items from the RedisCache."""
18 |         test_cases = (
19 |             ('favorite_fruit', 'melon'),
20 |             ('favorite_number', 86),
21 |             ('favorite_fraction', 86.54),
22 |             ('favorite_boolean', False),
23 |             ('other_boolean', True),
24 |         )
25 | 
26 |         # Test that we can get and set different types.
27 |         for key, value in test_cases:
28 |             with self.subTest(key=key, value=value):
29 |                 await self.cache.set(key, value)
30 |                 returned_value = await self.cache.get(key)
31 | 
32 |                 # A round trip should preserve the value
33 |                 self.assertEqual(returned_value, value)
34 |                 # A round trip should preserve the exact type of the value
35 |                 self.assertIs(type(returned_value), type(value))
36 | 
37 |     async def test_get_item_default_value(self):
38 |         """Test if the get method returns the default value if a key was not found."""
39 |         test_cases = ("a", 10, 1.1, True)
40 | 
41 |         for default_value in test_cases:
42 |             with self.subTest(default_value=default_value):
43 |                 returned_value = await self.cache.get("non-existing key", default_value)
44 | 
45 |                 # We should return the same object
46 |                 self.assertIs(returned_value, default_value)
47 | 
48 |     async def test_set_item_type(self):
49 |         """Test that .set rejects keys of types that are not permitted."""
50 |         test_cases = (["lemon", "melon", "apple"], 4.21, True)
51 | 
52 |         for invalid_key in test_cases:
53 |             with self.subTest(invalid_key=invalid_key, invalid_key_type=type(invalid_key)):
54 |                 with self.assertRaises(TypeError):
55 |                     await self.cache.set(invalid_key, "nice")
56 | 
57 |     async def test_delete_item(self):
58 |         """Test that .delete allows us to delete stuff from the RedisCache."""
59 |         # Add an item and verify that it gets added
60 |         await self.cache.set("internet", "firetruck")
61 |         self.assertEqual(await self.cache.get("internet"), "firetruck")
62 | 
63 |         # Delete that item and verify that it gets deleted
64 |         await self.cache.delete("internet")
65 |         self.assertIs(await self.cache.get("internet"), None)
66 | 
67 |     async def test_contains(self):
68 |         """Test that we can check membership with .contains."""
69 |         await self.cache.set('favorite_country', "Burkina Faso")
70 | 
71 |         self.assertIs(await self.cache.contains('favorite_country'), True)
72 |         self.assertIs(await self.cache.contains('favorite_dentist'), False)
73 | 
74 |     async def test_items(self):
75 |         """Test that the RedisCache can be iterated."""
76 |         # Set up our test cases in the Redis cache
77 |         test_cases = [
78 |             ('favorite_turtle', 'Donatello'),
79 |             ('second_favorite_turtle', 'Leonardo'),
80 |             ('third_favorite_turtle', 'Raphael'),
81 |         ]
82 |         for key, value in test_cases:
83 |             await self.cache.set(key, value)
84 | 
85 |         # Consume the items view into a regular list, easier to compare that way.
86 |         redis_items = [item for item in await self.cache.items()]
87 | 
88 |         # These sequences are probably in the same order now, but probably
89 |         # isn't good enough for tests. Let's not rely on .hgetall always
90 |         # returning things in sequence, and just sort both lists to be safe.
91 |         redis_items = sorted(redis_items)
92 |         test_cases = sorted(test_cases)
93 | 
94 |         # If these are equal now, everything works fine.
95 |         self.assertSequenceEqual(test_cases, redis_items)
96 | 
97 |     async def test_length(self):
98 |         """Test that we can get the correct .length from the RedisCache."""
99 |         await self.cache.set('one', 1)
100 |         await self.cache.set('two', 2)
101 |         await self.cache.set('three', 3)
102 |         self.assertEqual(await self.cache.length(), 3)
103 | 
104 |         await self.cache.set('four', 4)
105 |         self.assertEqual(await self.cache.length(), 4)
106 | 
107 |     async def test_to_dict(self):
108 |         """Test that the .to_dict method returns a workable dictionary copy."""
109 |         test_data = [
110 |             ('favorite_turtle', 'Donatello'),
111 |             ('second_favorite_turtle', 'Leonardo'),
112 |             ('third_favorite_turtle', 'Raphael'),
113 |         ]
114 |         for key, value in test_data:
115 |             await self.cache.set(key, value)
116 | 
117 |         copy = await self.cache.to_dict()
118 |         local_copy = {key: value for key, value in await self.cache.items()}
119 |         self.assertIs(type(copy), dict)
120 |         self.assertDictEqual(copy, local_copy)
121 | 
122 |     async def test_clear(self):
123 |         """Test that the .clear method removes the entire hash."""
124 |         await self.cache.set('teddy', 'with me')
125 |         await self.cache.set('in my dreams', 'you have a weird hat')
126 |         self.assertEqual(await self.cache.length(), 2)
127 | 
128 |         await self.cache.clear()
129 |         self.assertEqual(await self.cache.length(), 0)
130 | 
131 |     async def test_pop(self):
132 |         """Test that we can .pop an item from the RedisCache."""
133 |         await self.cache.set('john', 'was afraid')
134 | 
135 |         self.assertEqual(await self.cache.pop('john'), 'was afraid')
136 |         self.assertEqual(await self.cache.pop('pete', 'breakneck'), 'breakneck')
137 |         self.assertEqual(await self.cache.length(), 0)
138 | 
139 |     async def test_update(self):
140 |         """Test that we can .update the RedisCache with multiple items."""
141 |         await self.cache.set("reckfried", "lona")
142 |         await self.cache.set("bel air", "prince")
143 |         await self.cache.update({
144 |             "reckfried": "jona",
145 |             "mega": "hungry, though",
146 |         })
147 | 
148 |         result = {
149 |             "reckfried": "jona",
150 |             "bel air": "prince",
151 |             "mega": "hungry, though",
152 |         }
153 |         self.assertDictEqual(await self.cache.to_dict(), result)
154 | 
155 |     async def test_increment(self):
156 |         """Test the .increment and .decrement methods on float and integer values."""
157 |         local_copy = {
158 |             "int_value": 10,
159 |             "float_value": 12.5,
160 |         }
161 | 
162 |         await self.cache.set("int_value", local_copy["int_value"])
163 |         await self.cache.set("float_value", local_copy["float_value"])
164 | 
165 |         for increment in ((), (25,), (25.5,), (-30,), (-35.5,)):
166 |             for target in ("int_value", "float_value"):
167 |                 # Sanity check
168 |                 pre_increment = await self.cache.get(target)
169 |                 self.assertEqual(local_copy[target], pre_increment)
170 | 
171 |                 with self.subTest(target=target, initial_value=pre_increment, increment=increment):
172 |                     # unpack the value for our local copy
173 |                     value = increment[0] if increment else 1
174 | 
175 |                     # first we increment
176 |                     local_copy[target] += value
177 |                     await self.cache.increment(target, *increment)
178 |                     post_increment = await self.cache.get(target)
179 |                     self.assertEqual(local_copy[target], post_increment)
180 | 
181 |                     # then, we decrement
182 |                     local_copy[target] -= value
183 |                     await self.cache.decrement(target, *increment)
184 |                     post_decrement = await self.cache.get(target)
185 |                     self.assertEqual(local_copy[target], post_decrement)
186 | 
187 |     async def test_increment_raises_type_error_for_invalid_types(self):
188 |         """Test if `.increment` raises
TypeError for invalid types."""
189 |         test_cases = (
190 |             {"initial": 100, "increment": "Python Discord"},
191 |             {"initial": 1.1, "increment": True},
192 |             {"initial": "Python Discord", "increment": 200},
193 |             {"initial": True, "increment": 2.2},
194 |         )
195 | 
196 |         for case in test_cases:
197 |             await self.cache.set("value", case["initial"])
198 |             with self.subTest(**case):
199 |                 with self.assertRaises(TypeError):
200 |                     await self.cache.increment("value", amount=case["increment"])
201 | 
202 |     async def test_expiry_expires_after_timeout(self):
203 |         """Test setting an expiry on a RedisCache."""
204 |         with time_machine.travel(0, tick=False) as traveller:
205 |             await self.cache.set("key", "value")
206 |             result = await self.cache.set_expiry(10)
207 |             self.assertTrue(result)
208 |             traveller.shift(5)
209 |             self.assertEqual(await self.cache.get("key"), "value")
210 |             traveller.shift(6)
211 |             self.assertIsNone(await self.cache.get("key"))
212 | 
213 |     async def test_expiry_returns_false_for_nonexisting_key(self):
214 |         """The set_expiry method should return `False` for non-existing keys."""
215 |         # Before setting the first key/value pair, the outer namespace key does
216 |         # not exist yet.
217 |         self.assertFalse(await self.cache.set_expiry(10))
218 | 
219 |     async def test_set_expiry_at_expires_after_timestamp(self):
220 |         """The namespace should expire after the specified timestamp."""
221 |         with time_machine.travel(1100, tick=False) as traveller:
222 |             await self.cache.set("key", "value")
223 |             result = await self.cache.set_expiry_at(1110)
224 |             self.assertTrue(result)
225 |             traveller.shift(5)
226 |             self.assertEqual(await self.cache.get("key"), "value")
227 |             traveller.shift(6)
228 |             self.assertIsNone(await self.cache.get("key"))
229 | 
230 |     async def test_set_expiry_at_accepts_datetime(self):
231 |         """The namespace should expire after the specified datetime."""
232 |         dt = datetime.datetime(2021, 1, 1, 12, 11, 10, tzinfo=datetime.timezone.utc)
233 |         delta_expiry = datetime.timedelta(seconds=50_000_000)
234 | 
235 |         dt_expiry = dt + delta_expiry
236 |         with time_machine.travel(dt, tick=False) as traveller:
237 |             await self.cache.set("key", "value")
238 |             result = await self.cache.set_expiry_at(dt_expiry)
239 |             self.assertTrue(result)
240 |             traveller.move_to(dt_expiry - datetime.timedelta(seconds=1))
241 |             self.assertEqual(await self.cache.get("key"), "value")
242 |             traveller.move_to(dt_expiry + datetime.timedelta(seconds=1))
243 |             self.assertIsNone(await self.cache.get("key"))
244 | 
245 |     async def test_expiry_at_returns_false_for_nonexisting_key(self):
246 |         """The set_expiry_at method should return `False` for non-existing keys."""
247 |         # Before setting the first key/value pair, the outer namespace key does
248 |         # not exist yet.
249 |         self.assertFalse(await self.cache.set_expiry_at(10))
250 | 
--------------------------------------------------------------------------------
/async_rediscache/types/queue.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import functools
4 | import logging
5 | import typing
6 | import weakref
7 | from typing import Optional
8 | 
9 | import redis
10 | 
11 | from .base import RedisObject, RedisValueType
12 | 
13 | __all__ = [
14 |     "RedisQueue",
15 |     "RedisTaskQueue",
16 |     "RedisTask",
17 |     "TaskAlreadyDone",
18 |     "TaskNotPending",
19 | ]
20 | 
21 | log = logging.getLogger(__name__)
22 | 
23 | 
24 | class TaskAlreadyDone(RuntimeError):
25 |     """Raised when finalize is called on a task that is already done."""
26 | 
27 | 
28 | class TaskNotPending(RuntimeError):
29 |     """Raised when finalizing a task that is not found in the 'pending' queue."""
30 | 
31 | 
32 | class RedisQueue(RedisObject):
33 |     """
34 |     A Redis-backed queue that can be used for producer/consumer design patterns.
35 | 
36 |     The queue is backed internally by a Redis List, which allows you to append
37 |     to and pop from both sides of the queue in constant time. To avoid confusion
38 |     about the endianness of the queue, it exposes methods such as `put` and
39 |     `get` instead of `lpush` and `rpop`; this is similar to the interface
40 |     provided by `queue.SimpleQueue` in Python's standard library.
41 | 
42 |     By default, the namespace key of a RedisQueue is automatically determined
43 |     by the name of the owner class and the class attribute assigned to the
44 |     RedisQueue instance. To bind a RedisQueue to a specific namespace, pass the
45 |     namespace as the `namespace` keyword argument to the constructor.
46 |     """
47 | 
48 |     async def put(self, value: RedisValueType) -> None:
49 |         """
50 |         Put a value on the queue.
51 | 
52 |         The value is converted to a typestring and pushed onto the left side
53 |         of the underlying Redis list with `LPUSH`. Since `get` pops values
54 |         from the right side of the list, the queue behaves in a first-in,
55 |         first-out manner.
56 | 
57 |         This method returns as soon as the value has been pushed; it does
58 |         not wait for the value to be picked up by a consumer.
59 |         """
60 |         value_string = self._value_to_typestring(value)
61 |         log.debug(f"putting {value_string!r} on RedisQueue `{self.namespace}`")
62 |         await self.redis_session.client.lpush(self.namespace, value_string)
63 | 
64 |     # This method is provided for interface compatibility with queue.SimpleQueue
65 |     put_nowait = functools.partialmethod(put)
66 | 
67 |     async def get(self, wait: bool = True, timeout: int = 0) -> Optional[RedisValueType]:
68 |         """
69 |         Remove and return a value from the queue.
70 | 
71 |         If `wait` is `True` (default), this method will wait for an item to
72 |         become available. An optional `timeout` specifies for how long this
73 |         method should wait. A `timeout` value of `0` indicates that this method
74 |         will wait forever.
75 | 
76 |         This method returns `None` if no item was available within the waiting
77 |         conditions specified.
78 |         """
79 |         log.debug(
80 |             f"getting value from RedisQueue `{self.namespace}` "
81 |             f"(wait={wait!r}, timeout={timeout!r})"
82 |         )
83 | 
84 |         if wait:
85 |             value = await self.redis_session.client.brpop(self.namespace, timeout=timeout)
86 | 
87 |             # If we can get an item from the queue before the timeout runs
88 |             # out, we get a list back, in the form `[namespace, value]`.
89 |             # no value was received before the timeout, we simply get `None`
90 |             # back. This means we need to get the value out of the list when
91 |             # we actually got a value back instead of `None`.
92 |             if value:
93 |                 _, value = value
94 |         else:
95 |             value = await self.redis_session.client.rpop(self.namespace)
96 | 
97 |         if value is not None:
98 |             value = self._value_from_typestring(value)
99 | 
100 |         log.debug(f"got value `{value!r}` from RedisQueue `{self.namespace}`")
101 |         return value
102 | 
103 |     # Provided for interface compatibility with `queue.SimpleQueue`.
104 |     get_nowait = functools.partialmethod(get, wait=False)
105 | 
106 |     async def qsize(self) -> int:
107 |         """
108 |         Return the (approximate) size of the RedisQueue.
109 | 
110 |         Note that while we can determine the exact size of the queue at the
111 |         moment Redis receives the request, this value may have become stale
112 |         before we received it back.
113 |         """
114 |         return await self.redis_session.client.llen(self.namespace)
115 | 
116 |     async def empty(self) -> bool:
117 |         """
118 |         Return `True` if the RedisQueue is empty.
119 | 
120 |         The caveat that applies to the `qsize` method also applies here.
121 |         """
122 |         return await self.qsize() == 0
123 | 
124 |     async def iter_tasks(
125 |         self, wait: bool = True, timeout: int = 0
126 |     ) -> typing.AsyncGenerator[typing.Union[RedisValueType, RedisTask], None]:
127 |         """Yield all items in the queue, optionally waiting for new tasks."""
128 |         while True:
129 |             value = await self.get(wait, timeout)
130 |             if value is None:
131 |                 return
132 | 
133 |             yield value
134 | 
135 |     def __aiter__(
136 |         self
137 |     ) -> typing.AsyncGenerator[typing.Union[RedisValueType, RedisTask], None]:
138 |         """Yield all items in the queue until it's emptied."""
139 |         return self.iter_tasks(wait=False)
140 | 
141 | 
142 | class RedisTaskQueue(RedisQueue):
143 |     """A Queue class with task tracking features to prevent data loss."""
144 | 
145 |     def __init__(self, *args, client_identifier: typing.Optional[str] = None, **kwargs) -> None:
146 |         super().__init__(*args, **kwargs)
147 |         self.client_identifier = client_identifier
148 | 
149 |     @property
150 |     def namespace_pending(self) -> str:
151 |         """Get the name of the queue where pending tasks are stored."""
152 |         client_id = f"{self.client_identifier}_" if self.client_identifier is not None else ""
153 |         return f"{self.namespace}${client_id}pending"
154 | 
155 |     async def get(self, wait: bool = True, timeout: int = 0) -> Optional[RedisTask]:
156 |         """
157 |         Get an item from the queue wrapped in a Task instance.
158 | 
159 |         When you get an item from the queue, it will not be directly removed
160 |         from Redis. Instead, it will be moved to an in-progress list to prevent
161 |         data loss in case the worker is interrupted before its job is completed.
162 | 
163 |         You should mark a `Task` as done by calling its `finalize` method in the
164 |         worker once it has completed its work to prevent items from staying
165 |         alive indefinitely. See the `Task` class for more information.
166 | """ 167 | log.debug( 168 | f"getting value from RedisTaskQueue `{self.namespace}` " 169 | f"(wait={wait!r}, timeout={timeout!r})" 170 | ) 171 | 172 | namespaces = {"src": self.namespace, "dst": self.namespace_pending} 173 | if wait: 174 | value = await self.redis_session.client.brpoplpush(**namespaces, timeout=timeout) 175 | else: 176 | value = await self.redis_session.client.rpoplpush(**namespaces) 177 | 178 | if value is not None: 179 | value = self._value_from_typestring(value) 180 | value = RedisTask(value, owner=self) 181 | 182 | log.debug(f"got value `{value!r}` from RedisTaskQueue `{self.namespace}`") 183 | return value 184 | 185 | async def task_done(self, task: RedisTask) -> None: 186 | """Mark a task as done by removing it from the pending tasks queue.""" 187 | typestring = self._value_to_typestring(task.value) 188 | removed = await self.redis_session.client.lrem(self.namespace_pending, 1, typestring) 189 | 190 | if not removed: 191 | raise TaskNotPending(f"task {task.value!r} was not found in the pending tasks queue.") 192 | 193 | task.done = True 194 | 195 | async def reschedule_pending_task(self, task: typing.Union[RedisValueType, RedisTask]) -> None: 196 | """ 197 | Move a `task` from the pending tasks queue back to the main queue. 198 | 199 | This is a DANGEROUS operation: Rescheduling a task that is currently 200 | still being processed by a worker leads to an inconsistent state: The 201 | task is still being processed, but it's also queued to be processed 202 | again. It will also trigger a `RuntimeError` when the worker attempts 203 | to mark the task as done as the task will not be found in the pending 204 | tasks queue. 205 | """ 206 | if isinstance(task, RedisTask): 207 | task = task.value 208 | 209 | reschedule_script = await self._load_script("redisqueue_reschedule_task.lua") 210 | 211 | try: 212 | keys = [self.namespace, self.namespace_pending] 213 | args = [self._value_to_typestring(task)] 214 | await self.redis_session.client.evalsha(reschedule_script, len(keys), *keys, *args) 215 | except redis.ResponseError: 216 | raise TaskNotPending( 217 | f"task `{task}` not found in pending tasks queue `{self.namespace_pending}`" 218 | ) from None 219 | 220 | async def reschedule_all_pending_client_tasks(self) -> int: 221 | """ 222 | Reschedule all pending tasks of this client. 223 | 224 | This is a DANGEROUS operation that could lead to an inconsistent state 225 | in the queue. See `RedisTaskQueue.reschedule_pending_task` for more 226 | information. 227 | """ 228 | reschedule_script = await self._load_script("redisqueue_reschedule_all_client_tasks.lua") 229 | rescheduled_tasks = await self.redis_session.client.evalsha( 230 | reschedule_script, 231 | 2, 232 | self.namespace, 233 | self.namespace_pending, 234 | ) 235 | 236 | return int(rescheduled_tasks) 237 | 238 | 239 | class RedisTask: 240 | """ 241 | A class that represents a task popped from a RedisQueue. 242 | 243 | A task has a weak reference to its owner queue, which means you can mark a 244 | task as done as long as the owner queue is still alive. 
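
    Accessing `owner` raises a `RuntimeError` once the owning queue has been
    garbage collected.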
245 | """ 246 | 247 | def __init__(self, value: RedisValueType, owner: RedisTaskQueue) -> None: 248 | self._value = value 249 | self.owner_reference = weakref.ref(owner) 250 | self.done = False 251 | 252 | def __repr__(self) -> str: 253 | """Return the official representation of the task.""" 254 | cls = self.__class__.__name__ 255 | status = "done" if self.done else "pending" 256 | return f"<{cls} task_data={self._value!r} [{status}]>" 257 | 258 | @property 259 | def value(self) -> RedisValueType: 260 | """Return the task value.""" 261 | return self._value 262 | 263 | @property 264 | def owner(self) -> RedisTaskQueue: 265 | """Get the owner RedisTaskQueue from the weak reference.""" 266 | queue = self.owner_reference() 267 | if not queue: 268 | raise RuntimeError("can't finalize task as the queue instance no longer exists") 269 | 270 | return queue 271 | 272 | async def finalize(self) -> None: 273 | """Mark the task as done and remove it from the pending tasks queue.""" 274 | if self.done: 275 | raise TaskAlreadyDone("task was already marked as done") 276 | 277 | await self.owner.task_done(task=self) 278 | 279 | async def reschedule(self) -> None: 280 | """Reschedule this task in the main queue.""" 281 | await self.owner.reschedule_pending_task(self) 282 | -------------------------------------------------------------------------------- /async_rediscache/types/base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | import datetime 5 | import functools 6 | import importlib.resources 7 | import logging 8 | from functools import partialmethod 9 | from types import MethodType 10 | from typing import Any, Callable, Dict, Optional, Tuple, Union 11 | 12 | from ..session import RedisSession 13 | 14 | __all__ = [ 15 | "RedisObject", 16 | "NoNamespaceError", 17 | "RedisKeyOrValue", 18 | "RedisKeyType", 19 | "RedisValueType", 20 | ] 21 | 22 | log = logging.getLogger(__name__) 23 | 24 | # Type aliases 25 | RedisKeyType = Union[str, int] 26 | RedisValueType = Union[str, int, float, bool] 27 | RedisKeyOrValue = Union[RedisKeyType, RedisValueType] 28 | 29 | # Prefix tuples 30 | _PrefixTuple = Tuple[Tuple[str, Any], ...] 31 | _VALUE_PREFIXES = ( 32 | ("f|", float), 33 | ("i|", int), 34 | ("s|", str), 35 | ("b|", bool), 36 | ) 37 | _KEY_PREFIXES = ( 38 | ("i|", int), 39 | ("s|", str), 40 | ) 41 | _ERROR_PREFIXES = ( 42 | ("TypeError|", TypeError), 43 | ("ValueError|", ValueError), 44 | ) 45 | 46 | 47 | class NoNamespaceError(RuntimeError): 48 | """Raised when a RedisCache instance has no namespace.""" 49 | 50 | 51 | class RedisObject: 52 | """A base class for Redis caching object implementations.""" 53 | 54 | _namespace_locks = {} 55 | _registered_scripts = {} 56 | 57 | def __init__( 58 | self, *, namespace: Optional[str] = None, use_global_namespace: bool = True 59 | ) -> None: 60 | """Initialize the RedisCache.""" 61 | self._local_namespace = namespace 62 | self._use_global_namespace = use_global_namespace 63 | self._transaction_lock = None 64 | 65 | def __set_name__(self, owner: Any, attribute_name: str) -> None: 66 | """ 67 | Set the namespace to Class.attribute_name. 68 | 69 | Called automatically when this class is assigned to a class attribute. 70 | 71 | This class MUST be created as a class attribute, otherwise it will raise 72 | exceptions whenever a method is used. 
This is because it uses this
72 |         method to create a namespace like `MyCog.my_class_attribute` which is
73 |         used as a hash name when we store stuff in Redis, to prevent collisions.
74 | 
75 |         The namespace is only set the first time a class attribute gets assigned
76 |         to a RedisCache instance. Assigning a class attribute to an existing
77 |         instance will not overwrite the namespace and the additional class
78 |         attribute will act as an alias to the original instance.
79 |         """
80 |         if not self._local_namespace:
81 |             self._local_namespace = f"{owner.__name__}.{attribute_name}"
82 | 
83 |     def __repr__(self) -> str:
84 |         """Return a beautiful representation of this object instance."""
85 |         return f"{self.__class__.__name__}(namespace={self._local_namespace!r})"
86 | 
87 |     @property
88 |     def redis_session(self) -> RedisSession:
89 |         """Get the current active RedisSession after validating a namespace was set."""
90 |         if self._local_namespace is None:
91 |             cls_name = self.__class__.__name__
92 |             error_message = (
93 |                 f"can't get the redis session as the {cls_name} instance does not have a namespace."
94 |             )
95 |             log.critical(error_message)
96 |             raise NoNamespaceError(error_message)
97 | 
98 |         return RedisSession.get_current_session()
99 | 
100 |     @property
101 |     def namespace(self) -> str:
102 |         """Return the `namespace` of this RedisObject."""
103 |         global_namespace = self.redis_session.global_namespace
104 |         if self._use_global_namespace and global_namespace:
105 |             namespace = f"{global_namespace}.{self._local_namespace}"
106 |         else:
107 |             namespace = self._local_namespace
108 | 
109 |         return namespace
110 | 
111 |     @staticmethod
112 |     def _to_typestring(key_or_value: RedisKeyOrValue, prefixes: _PrefixTuple) -> str:
113 |         """Turn a valid Redis type into a typestring."""
114 |         key_or_value_type = type(key_or_value)
115 | 
116 |         for prefix, _type in prefixes:
117 |             # isinstance is a bad idea here, because isinstance(False, int) == True.
118 |             if key_or_value_type is _type:
119 |                 if key_or_value_type is bool:
120 |                     # Convert bools into integers before storing them
121 |                     key_or_value = int(key_or_value)
122 | 
123 |                 return f"{prefix}{key_or_value}"
124 | 
125 |         raise TypeError(f"RedisObject._to_typestring only supports the following: {prefixes}.")
126 | 
127 |     @staticmethod
128 |     def _from_typestring(
129 |         key_or_value: Union[bytes, str], prefixes: _PrefixTuple
130 |     ) -> RedisKeyOrValue:
131 |         """Deserialize a typestring into a valid Redis type."""
132 |         # Stuff that comes out of Redis will be bytestrings, so let's decode those.
133 |         if isinstance(key_or_value, bytes):
134 |             key_or_value = key_or_value.decode('utf-8')
135 | 
136 |         # Now we convert our unicode string back into the type it originally was.
137 |         for prefix, _type in prefixes:
138 |             if key_or_value.startswith(prefix):
139 | 
140 |                 # For booleans, we need special handling because bool("False") is True.
141 |                 if prefix == "b|":
142 |                     value = key_or_value[len(prefix):]
143 |                     return bool(int(value))
144 | 
145 |                 # Otherwise we can just convert normally.
146 |                 return _type(key_or_value[len(prefix):])
147 | 
148 |         raise TypeError(f"RedisObject._from_typestring only supports the following: {prefixes}.")
149 | 
150 |     # Add some nice partials to call our generic typestring converters.
151 |     # These are basically methods that will fill in some of the parameters for you, so
152 |     # that any call to _key_to_typestring will be like calling _to_typestring with the
153 |     # `prefixes` parameter pre-filled.
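    # For illustration, a few hypothetical conversions these partials perform:
    #   _value_to_typestring(42)        -> "i|42"
    #   _value_to_typestring(True)      -> "b|1"   (bools are stored as ints)
    #   _value_from_typestring("f|1.5") -> 1.5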
154 | # 155 | # See https://docs.python.org/3/library/functools.html#functools.partialmethod 156 | _key_to_typestring: MethodType = partialmethod(_to_typestring, prefixes=_KEY_PREFIXES) 157 | _value_to_typestring: MethodType = partialmethod(_to_typestring, prefixes=_VALUE_PREFIXES) 158 | _key_from_typestring: MethodType = partialmethod(_from_typestring, prefixes=_KEY_PREFIXES) 159 | _value_from_typestring: MethodType = partialmethod(_from_typestring, prefixes=_VALUE_PREFIXES) 160 | 161 | def _maybe_value_from_typestring( 162 | self, 163 | maybe_value: Optional[Union[bytes, str]], 164 | default: Optional[RedisValueType] = None, 165 | ) -> Optional[RedisValueType]: 166 | """ 167 | Deserialize an optional redis return value safely. 168 | 169 | This method will try to match `maybe_value` in three ways: 170 | - If `maybe_value` is `None`, return the default 171 | - If `maybe_value` represents an error, raise the appropriate exception 172 | - If `maybe_value` represents a valid return value, deserialize it 173 | """ 174 | if maybe_value is None: 175 | return default 176 | 177 | if isinstance(maybe_value, bytes): 178 | maybe_value = maybe_value.decode('utf-8') 179 | 180 | for prefix, exception in _ERROR_PREFIXES: 181 | if maybe_value.startswith(prefix): 182 | raise exception(maybe_value[len(prefix):]) 183 | 184 | return self._value_from_typestring(maybe_value) 185 | 186 | def _dict_from_typestring(self, dictionary: Dict) -> Dict: 187 | """Turns all contents of a dict into valid Redis types.""" 188 | return { 189 | self._key_from_typestring(key): self._value_from_typestring(value) 190 | for key, value in dictionary.items() 191 | } 192 | 193 | def _dict_to_typestring(self, dictionary: Dict) -> Dict: 194 | """Turns all contents of a dict into typestrings.""" 195 | return { 196 | self._key_to_typestring(key): self._value_to_typestring(value) 197 | for key, value in dictionary.items() 198 | } 199 | 200 | async def _load_script(self, script: str) -> str: 201 | """Load a Redis Lua script and return the SHA Digest.""" 202 | if script in self._registered_scripts: 203 | digest = self._registered_scripts[script] 204 | 205 | # check if the script is already registered with redis 206 | [script_exists] = await self.redis_session.client.script_exists(digest) 207 | 208 | if script_exists: 209 | return digest 210 | 211 | redis_script = importlib.resources.read_text("async_rediscache.redis_scripts", script) 212 | log.debug(f"Registering `{script}` script with Redis.") 213 | self._registered_scripts[script] = await self.redis_session.client.script_load(redis_script) 214 | return self._registered_scripts[script] 215 | 216 | def atomic_transaction(self, method: Callable) -> Callable: 217 | """ 218 | Ensure that the decorated method is atomic within a RedisObject. 219 | 220 | Some operations performed on a RedisObject need to occur atomically, 221 | from the perspective of Redis. An example is trying to set multiple 222 | values that form a consistent set and should be set "all at once". 223 | 224 | By applying this decorator to all methods that interact with those 225 | consistent sets, those methods need to acquire a lock before they are 226 | allowed to run. This means that these methods will "wait" for the 227 | previous tasks to be finished. 228 | 229 | The `asyncio.Lock` is RedisObject-specific, meaning that there's a 230 | separate lock for each RedisObject (e.g., a RedisCache). 231 | 232 | The `wrapper` lazily loads the `asyncio.Lock` to ensure it's created 233 | within the right running event loop. 
234 | 
235 |         Note: Take care not to await a decorated method from within a method
236 |         that is also decorated by this decorator, as that will deadlock.
237 |         """
238 |         log.debug(f"Wrapping {method.__qualname__} to ensure atomic transactions")
239 | 
240 |         @functools.wraps(method)
241 |         async def wrapper(*args, **kwargs) -> Any:
242 |             if self._transaction_lock is None:
243 |                 log.debug(f"Creating a transaction lock for {self!r}")
244 |                 self._transaction_lock = asyncio.Lock()
245 | 
246 |             log.debug(f"[transaction lock] {method.__qualname__}: Trying to acquire lock")
247 |             async with self._transaction_lock:
248 |                 log.debug(f"[transaction lock] {method.__qualname__}: Acquired lock")
249 |                 result = await method(*args, **kwargs)
250 | 
251 |             log.debug(f"[transaction lock] {method.__qualname__}: Released lock")
252 |             return result
253 | 
254 |         return wrapper
255 | 
256 |     async def set_expiry(self, seconds: float) -> bool:
257 |         """
258 |         Set a time-to-live on the entire RedisCache namespace.
259 | 
260 |         This method accepts a precision down to 1 ms. If more decimal
261 |         places are provided, the duration is truncated. Passing a
262 |         negative expiry will result in the namespace being deleted
263 |         immediately.
264 | 
265 |         Note: Setting an expiry on a key within the namespace is not
266 |         supported by Redis. It's the entire namespace or nothing.
267 |         """
268 |         result = await self.redis_session.client.pexpire(self.namespace, int(1000*seconds))
269 |         return bool(result)
270 | 
271 |     async def set_expiry_at(self, timestamp: Union[datetime.datetime, float]) -> bool:
272 |         """
273 |         Set a specific timestamp for the entire RedisCache to expire.
274 | 
275 |         This method accepts either a `datetime.datetime` or seconds since
276 |         the Unix Epoch with a maximum precision of three decimal places (ms).
277 | 
278 |         Note: Setting an expiry on a key within the namespace is not
279 |         supported by Redis. It's the entire namespace or nothing.
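
        As an illustration with a hypothetical `cache` instance:
        `await cache.set_expiry_at(time.time() + 60)` would expire the
        whole namespace roughly one minute from now.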
280 |         """
281 |         if isinstance(timestamp, datetime.datetime):
282 |             timestamp = timestamp.timestamp()
283 | 
284 |         result = await self.redis_session.client.pexpireat(self.namespace, int(1000*timestamp))
285 |         return bool(result)
286 | 
--------------------------------------------------------------------------------
/tests/types/test_queue.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | 
3 | from async_rediscache import types
4 | from .helpers import BaseRedisObjectTests
5 | 
6 | 
7 | class RedisQueueTests(BaseRedisObjectTests):
8 |     """Tests for the RedisQueue datatype."""
9 | 
10 |     async def asyncSetUp(self):
11 |         await super().asyncSetUp()
12 |         self.queue = types.RedisQueue(namespace="test_queue")
13 | 
14 |     async def test_put_get_no_wait(self):
15 |         """Test the .put and .get methods without waiting."""
16 |         values_in = ("hello", 100, 1.1, True)
17 | 
18 |         for value_in in values_in:
19 |             with self.subTest("single value put/get test", value=repr(value_in)):
20 |                 self.assertTrue(await self.queue.empty())
21 | 
22 |                 await self.queue.put(value_in)
23 |                 value_out = await self.queue.get(wait=False)
24 |                 self.assertEqual(value_in, value_out)
25 | 
26 |         with self.subTest("RedisQueue works on FIFO principle"):
27 |             self.assertTrue(await self.queue.empty())
28 |             for value_in in values_in:
29 |                 await self.queue.put(value_in)
30 | 
31 |             values_out = [await self.queue.get(wait=False) for _ in range(len(values_in))]
32 |             self.assertSequenceEqual(values_in, values_out)
33 | 
34 |         with self.subTest("complex sequence FIFO test"):
35 |             self.assertTrue(await self.queue.empty())
36 | 
37 |             values_in = ("one", "two", "three", "four", "five")
38 |             iter_values = iter(values_in)
39 |             values_out = []
40 | 
41 |             # Five items in, five items out, intermixed
42 |             await self.queue.put(next(iter_values))
43 |             await self.queue.put(next(iter_values))
44 |             values_out.append(await self.queue.get(wait=False))
45 |             await self.queue.put(next(iter_values))
46 |             values_out.append(await self.queue.get(wait=False))
47 |             await self.queue.put(next(iter_values))
48 |             values_out.append(await self.queue.get(wait=False))
49 |             values_out.append(await self.queue.get(wait=False))
50 |             await self.queue.put(next(iter_values))
51 |             values_out.append(await self.queue.get(wait=False))
52 | 
53 |             self.assertSequenceEqual(values_in, values_out)
54 | 
55 |         with self.subTest("test get_nowait partial method"):
56 |             self.assertTrue(await self.queue.empty())
57 | 
58 |             value_in = "test"
59 |             await self.queue.put_nowait(value_in)
60 |             value_out = await self.queue.get_nowait()
61 | 
62 |             self.assertEqual(value_in, value_out)
63 | 
64 |     async def test_get_wait(self):
65 |         """Test if get is able to wait for an item to become available."""
66 |         with self.subTest("get with no timeout"):
67 |             pending_get = asyncio.create_task(self.queue.get())
68 |             self.assertFalse(pending_get.done())
69 | 
70 |             value_in = "spam"
71 |             await self.queue.put(value_in)
72 | 
73 |             value_out = await pending_get
74 |             self.assertEqual(value_in, value_out)
75 |             self.assertTrue(pending_get.done())
76 | 
77 |         with self.subTest("get with timeout"):
78 |             pending_get = asyncio.create_task(self.queue.get(timeout=1))
79 |             value_out = await pending_get
80 |             self.assertIsNone(value_out)
81 | 
82 |     async def test_qsize_and_empty(self):
83 |         """Test if .qsize and .empty correctly reflect the size of the queue."""
84 |         self.assertEqual(await self.queue.qsize(), 0)
85 |         self.assertTrue(await self.queue.empty())
86 | 
87 |         await self.queue.put("one")
88 |         await self.queue.put("two")
89 |         self.assertEqual(await self.queue.qsize(), 2)
90 |         self.assertFalse(await self.queue.empty())
91 | 
92 |         await self.queue.get()
93 |         self.assertEqual(await self.queue.qsize(), 1)
94 |         self.assertFalse(await self.queue.empty())
95 | 
96 |         await self.queue.get()
97 |         self.assertEqual(await self.queue.qsize(), 0)
98 |         self.assertTrue(await self.queue.empty())
99 | 
100 | 
101 | class RedisTaskQueueTests(BaseRedisObjectTests):
102 |     """Tests for the RedisTaskQueue class."""
103 | 
104 |     async def asyncSetUp(self):
105 |         await super().asyncSetUp()
106 |         self.queue = types.RedisTaskQueue(namespace="task_queue")
107 | 
108 |     async def test_get_also_pushes_task_to_pending(self):
109 |         """Test if `.get` also pushes the task to the pending tasks queue."""
110 |         test_value = "Python Discord"
111 |         await self.queue.put(test_value)
112 |         task = await self.queue.get()
113 | 
114 |         queue_length = await self.mock_session.client.llen(self.queue.namespace)
115 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
116 | 
117 |         self.assertEqual(task.value, test_value)
118 |         self.assertEqual(queue_length, 0)
119 |         self.assertEqual(pending_length, 1)
120 | 
121 |     async def test_task_finalize_correctly_finalizes_task(self):
122 |         """`task.finalize` should remove the task from the pending queue and mark it as done."""
123 |         test_value = "Python Discord"
124 |         await self.queue.put(test_value)
125 |         self.assertEqual(await self.queue.qsize(), 1)
126 | 
127 |         task = await self.queue.get()
128 |         self.assertEqual(await self.queue.qsize(), 0)
129 |         self.assertEqual(task.value, test_value)
130 | 
131 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
132 | 
133 |         self.assertEqual(pending_length, 1)
134 | 
135 |         await task.finalize()
136 | 
137 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
138 | 
139 |         self.assertEqual(pending_length, 0)
140 |         self.assertTrue(task.done)
141 | 
142 |     async def test_reschedule_task_puts_task_back_on_queue(self) -> None:
143 |         """Rescheduling a task should put it back on the queue."""
144 |         test_value = "Python Discord"
145 |         await self.queue.put(test_value)
146 |         self.assertEqual(await self.queue.qsize(), 1)
147 | 
148 |         task = await self.queue.get()
149 |         self.assertEqual(await self.queue.qsize(), 0)
150 |         self.assertEqual(task.value, test_value)
151 | 
152 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
153 |         self.assertEqual(pending_length, 1)
154 | 
155 |         await self.queue.reschedule_pending_task(task)
156 | 
157 |         self.assertEqual(await self.queue.qsize(), 1)
158 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
159 |         self.assertEqual(pending_length, 0)
160 | 
161 |     async def test_reschedule_all_tasks_should_requeue_all_tasks(self) -> None:
162 |         """Rescheduling all tasks should requeue them in their original order."""
163 |         task_values = [f"task-{ident}" for ident in range(10)]
164 | 
165 |         for task_value in task_values:
166 |             await self.queue.put(task_value)
167 | 
168 |         self.assertEqual(await self.queue.qsize(), len(task_values))
169 | 
170 |         task_values_iter = iter(task_values)
171 |         async for task in self.queue:
172 |             self.assertEqual(next(task_values_iter), task.value)
173 | 
174 |         self.assertEqual(await self.queue.qsize(), 0)
175 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
176 |         self.assertEqual(pending_length, 10)
177 | 
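        # All ten tasks now sit in the pending queue; rescheduling should move
        # them back onto the main queue in their original order.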
178 |         await self.queue.reschedule_all_pending_client_tasks()
179 | 
180 |         self.assertEqual(await self.queue.qsize(), 10)
181 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
182 |         self.assertEqual(pending_length, 0)
183 | 
184 |     async def test_task_raises_TaskAlreadyDone_for_second_finalize(self) -> None:
185 |         """Should raise TaskAlreadyDone for finalizing a task that is already done."""
186 |         await self.queue.put("Hello")
187 |         task = await self.queue.get()
188 |         await task.finalize()
189 | 
190 |         with self.assertRaises(types.TaskAlreadyDone):
191 |             await task.finalize()
192 | 
193 |     async def test_task_queue_raises_TaskNotPending_for_finalizing_nonpending_task(self) -> None:
194 |         """Should raise TaskNotPending for finalizing a task not found in the pending queue."""
195 |         await self.queue.put("Hello")
196 |         task = await self.queue.get()
197 |         await task.reschedule()
198 | 
199 |         with self.assertRaises(types.TaskNotPending):
200 |             await task.finalize()
201 | 
202 |     async def test_reschedule_task_by_value(self) -> None:
203 |         """We should also be able to reschedule a task by its value."""
204 |         value = "Some interesting value"
205 |         await self.queue.put(value)
206 |         self.assertEqual((await self.queue.get()).value, value)
207 |         await self.queue.reschedule_pending_task(value)
208 | 
209 |         self.assertEqual(await self.queue.qsize(), 1)
210 |         pending_length = await self.mock_session.client.llen(self.queue.namespace_pending)
211 |         self.assertEqual(pending_length, 0)
212 | 
213 |     async def test_rescheduling_unknown_task_raises_TaskNotPending(self) -> None:
214 |         """Rescheduling an unknown task raises TaskNotPending."""
215 |         with self.assertRaises(types.TaskNotPending):
216 |             await self.queue.reschedule_pending_task("non-existent task")
217 | 
218 |     async def test_task_raises_RuntimeError_for_missing_owner_queue(self):
219 |         """If the owner queue instance no longer exists, a task should raise a RuntimeError."""
220 |         await self.queue.put("Hello")
221 |         task = await self.queue.get()
222 |         del self.queue
223 |         with self.assertRaises(RuntimeError):
224 |             _owner = task.owner  # noqa: F841
225 | 
226 | 
227 | class RedisTaskQueueMultipleClientsTests(BaseRedisObjectTests):
228 |     """Tests for the RedisTaskQueue class with multiple clients."""
229 | 
230 |     async def asyncSetUp(self):
231 |         await super().asyncSetUp()
232 |         self.queue_one = types.RedisTaskQueue(namespace="task_queue", client_identifier="one")
233 |         self.queue_two = types.RedisTaskQueue(namespace="task_queue", client_identifier="two")
234 | 
235 |     async def test_client_pending_queues_are_independent(self) -> None:
236 |         """The pending queues of different clients should be independent."""
237 |         values = [f"task-{i}" for i in range(6)]
238 | 
239 |         for value_one, value_two in zip(*[iter(values)]*2):
240 |             await self.queue_one.put(value_one)
241 |             await self.queue_two.put(value_two)
242 | 
243 |         # Both clients should see the same central queue
244 |         self.assertEqual(await self.queue_one.qsize(), len(values))
245 |         self.assertEqual(await self.queue_one.qsize(), await self.queue_two.qsize())
246 | 
247 |         for value_one, value_two in zip(*[iter(values)]*2):
248 |             self.assertEqual((await self.queue_one.get()).value, value_one)
249 |             self.assertEqual((await self.queue_two.get()).value, value_two)
250 | 
251 |         # The central queue should now be depleted
252 |         self.assertEqual(await self.queue_one.qsize(), 0)
253 |         self.assertEqual(await self.queue_one.qsize(), await self.queue_two.qsize())
254 | 
255 |         pending_one_length = await
self.mock_session.client.llen(self.queue_one.namespace_pending) 256 | pending_two_length = await self.mock_session.client.llen(self.queue_two.namespace_pending) 257 | 258 | self.assertEqual(pending_one_length, len(values) // 2) 259 | self.assertEqual(pending_two_length, len(values) // 2) 260 | 261 | await self.queue_one.reschedule_all_pending_client_tasks() 262 | 263 | pending_one_length = await self.mock_session.client.llen(self.queue_one.namespace_pending) 264 | pending_two_length = await self.mock_session.client.llen(self.queue_two.namespace_pending) 265 | 266 | self.assertEqual(pending_one_length, 0) 267 | self.assertEqual(pending_two_length, len(values) // 2) 268 | 269 | # Check if the values of queue_one have made it back on the main queue 270 | self.assertEqual(await self.queue_one.qsize(), len(values) // 2) 271 | 272 | await self.queue_two.reschedule_all_pending_client_tasks() 273 | 274 | pending_one_length = await self.mock_session.client.llen(self.queue_one.namespace_pending) 275 | pending_two_length = await self.mock_session.client.llen(self.queue_two.namespace_pending) 276 | 277 | self.assertEqual(pending_one_length, 0) 278 | self.assertEqual(pending_two_length, 0) 279 | 280 | # Check if the values of queue_two have made it back on the main queue 281 | self.assertEqual(await self.queue_one.qsize(), len(values)) 282 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "eb0e10631448a0f79c20d7fe068c0f3cac9132ab4e45dc9f643adb4c29b591dc" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.8" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "async-timeout": { 20 | "hashes": [ 21 | "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", 22 | "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028" 23 | ], 24 | "markers": "python_full_version <= '3.11.2'", 25 | "version": "==4.0.3" 26 | }, 27 | "fakeredis": { 28 | "extras": [ 29 | "lua" 30 | ], 31 | "hashes": [ 32 | "sha256:69987928d719d1ae1665ae8ebb16199d22a5ebae0b7d0d0d6586fc3a1a67428c", 33 | "sha256:c9baf3c7fd2ebf40db50db4c642c7c76b712b1eed25d91efcc175bba9bc40ca3" 34 | ], 35 | "index": "pypi", 36 | "markers": "python_version >= '3.7' and python_version < '4.0'", 37 | "version": "==2.20.0" 38 | }, 39 | "lupa": { 40 | "hashes": [ 41 | "sha256:0068d75f0df5f2fb85230b1df7a05305645ee28ef89551997eb09009c70d7f8a", 42 | "sha256:019e10a56c50ba60e94ff8c3e60a9a239d6438f1dc6ac17bcf2d44d4ada8f171", 43 | "sha256:02a0e45ada08e5694ab3f3c06523ec16322dfb875668ce9ff3e04a01d3e18e81", 44 | "sha256:02ed2848a33dfe43013c5a86d2c155a9669d3c438a847a4e3816b7f1bf17cec6", 45 | "sha256:033a14fe291ef532db11c3f3b65b364b5b3b3d3b6146aa7f7412f8f4d89471ce", 46 | "sha256:0432ec532513eaf5ae8961000baf56d550fed4a7b91c0a9759b6f17c1dafc8af", 47 | "sha256:06792b86f9410bd26936728e7f903e2eee76642cbf51e435622637a3d752a2ea", 48 | "sha256:0e66da3bc40cde8edeb4d7d8141afad67ec6a5da0ee07ce5265df7e899e0883c", 49 | "sha256:17fd814523b9fa268df8f0995874218a9be008dbcd1c1c7bd28207814a209491", 50 | "sha256:1be2e1015d8481511852ae0f9f05f3722715d7aadb48207480eb50edc45a7510", 51 | "sha256:200544d259a054c5d0c6696499d0c66ccd924d42efb41b09b19c2af9771f5c31", 52 | "sha256:201fc894d257132e90e42ce9396c5b45aa5f5bdc4cd4dfc8076c8476f04dd44b", 53 | 
"sha256:282126096ba71c1926f28da59cd1cf6913b7e9e7020d577b42dc52ca3c359e93", 54 | "sha256:29c46d79273a72c010a2949d41336bbb5ebafd09e2c2a4342d2f2f4238d378c8", 55 | "sha256:2a3dbf85baf66f0a8b862293c3cd61430d2d379652e3db3e5f979b16db7e374b", 56 | "sha256:2c11eafd262ff47ccb0bf9c28126dde21d3d01205cf6f5b5c2c4dbf04b99f5e9", 57 | "sha256:2d02d4af2682169b8aa744e7eae59d1e05f9b0071a59fb140852dae9b5c8d86c", 58 | "sha256:32d1e7cdced4e29771dacfed68abc92da9ba2300a2929ec5782467316ea4a715", 59 | "sha256:345032ef77bd474d288ea2c4ddd14b552b93d60a40a9b0daf0a82bc078625982", 60 | "sha256:3b3e02b920b61601e2d9713b1e197d8cbab0bd3709774ec6823357cd83ee7b9d", 61 | "sha256:3c953b9430751e792b721dd2265af1759251cdac0ade5642f25e16a6174bcc58", 62 | "sha256:3d34870912bf7501d2a9e7dc75319e55f836fd8412b783afa44c5bfb72be0867", 63 | "sha256:404bda126a34eef839e29fc94fd65c1092b53301b2d0abc9388f02cc5ba87ac9", 64 | "sha256:43353ae1e204b1f7fb18150f7dc5357592be37431e84f799c6cf21a4b7a52dcc", 65 | "sha256:4649a5501f0d8e5c96c297896377e9f73d0167df139109536187c57c60be1e90", 66 | "sha256:46b77e4a545d5ba00d17432853b26b50299129047d4f999c007fb9b6db3cfdd6", 67 | "sha256:47d3eb18511e83068a8ce476a9f7ad8642a35189e682f5a1053970ec9d98272a", 68 | "sha256:4c776290a06b03e8dd5ca061d9fefde13be37fb25700c56bb513343262ea1729", 69 | "sha256:4e00664780836b353113804f8e0f860322abf5ef723d615ba6f49d9e78874944", 70 | "sha256:50c529e5ecf3ec5b3e57efbb9a5def5125ceb7b95f12e2c89c34535856abb1ac", 71 | "sha256:5396ebb51753a8243a18080e2efa9f085bac5d43185d5a1dd9a3679ff7fb09c5", 72 | "sha256:5c249d83655942ebe7db99c4e981de547867a7d30ace34e61f3ccc5b7a14402c", 73 | "sha256:5e980571081c93152bb04de07bbde6852462e1674349eb3eafe703f5fa81a836", 74 | "sha256:65d5971eb8c060eb3c9218c25181001e25982dfdf88e0b284447f837a4318a5f", 75 | "sha256:682860cd6ed84e0ffdaf84c82c21b192858261964b3ed126bc54d52cc8a480b4", 76 | "sha256:690c0654b92c6de0893c004d0a46d5d5b5fd76e9017dda328a2435afdf3c55a0", 77 | "sha256:6e9ece8e7e4399473e1f9a4733445d93148c3205e1b87c158894287f3213bf6b", 78 | "sha256:71e517327bff75cc5e60fe105da7da6621a75ba05a5050869e33b4bdbe838288", 79 | "sha256:7563c4a015f51eb36d92874c0448bb8df504041d894e61e6c9cb9e6613132470", 80 | "sha256:769d7747056380ca4fb7923b7031b5732c1b9b9d0d160324cc88a32d7c98127c", 81 | "sha256:7762c6780fe7ab64d64f8658ab54d79cb5d3d0fbdcc76290f5fc19b41fc01ad5", 82 | "sha256:793bddad1a36eb7c8c04775867942cf2adfe09d482311791022c4ab4802169b4", 83 | "sha256:7c10d4f0fa592b798a71c0b2e273e4b899a14b3634a48cbc444917b254ddce37", 84 | "sha256:7caa1ce59fe1cefd845093d1354244c59d286fcc1196a15297fb189a5bb749c6", 85 | "sha256:8214a8b0fb1277e026301f60101af323c93868eefcad69984e7285bea5c1ac3f", 86 | "sha256:88495333e79937cdf7edac35ec36aca41d50134dbb23f2f1684a1685a4295433", 87 | "sha256:8f3e6ea86053ec0c9945ae313fba8ba06dc4ccc397369709bba956dd48db95a7", 88 | "sha256:90788d250f727720747784e67fbc50917f5ce051e24bc49661850f98b1b9ed42", 89 | "sha256:974de113c63e35668fbbbff656fef718e586abed3fc875eae4fece279a1e8a11", 90 | "sha256:9a5843fbfb22b70ea13ec624d43c818b396ff1f62d9bd84f9ed10e3fef06ccf0", 91 | "sha256:9add3d9ba86fa2fb5604e429ca811b9fa6b4c55fe5330bd9f0fcf51f2c5bebf8", 92 | "sha256:9b7c9799a45e6fff8c38395d370b318b8ce6841710c2082f180ea7d189f7d229", 93 | "sha256:9c7ec361e05d932c5355825982613077ac8cb5b63d95022d571290d8ca667188", 94 | "sha256:9fa9d5013a06aa09392f1d02d9724a9856f4f4111794ca9be17a016c83c6546a", 95 | "sha256:a6847c2541f9cbdd596df821a575222f471175cd710fb967ffc51801dae58d68", 96 | "sha256:a91eacc06ac89a2134c6b0f35ac65c45e18c984baf24b03d0f5187071074a597", 97 | 
"sha256:a97e647ac11ca5131a73628ee063233378c03100f0f408c77f9b45cb358619ab", 98 | "sha256:ab2ca1c51724b779a2531d2bef1480faae203c8917b9cc3d0a3d3acb37c1d7ad", 99 | "sha256:ad3fef486be7adddd349fe9a9c393789061312cf98ebc533b489be34f484cb79", 100 | "sha256:b0d5481e3af166d73da373ffda0eab1bd709b0177daa2616ce95816483942c21", 101 | "sha256:b3f6837c1e2fd7c66100828953063dfe8a1d283bc48e1144d621b35bf19ce79f", 102 | "sha256:becb01602dc6d5439101e1ac5877b25e35817b1bd131b9af709a5a181e6b8026", 103 | "sha256:c0be42065ad39219eaf890c224cc7cc140ed72691b97b0905dd7a89abebdf474", 104 | "sha256:c19482a595deed90e5b8542df1ed861e2a4a9d99bd8a9ff108e3a7c66bc7c6c0", 105 | "sha256:d225e06748aca078a02529054c6678ba3e5b7cc2080b5be30e33ede9eac5efb2", 106 | "sha256:d412925a73b6b848fd1076fbc392d445ff4a1ab5b5bb278e358f78768677c963", 107 | "sha256:d85c20691dbd2db5b7c60f40e4a5ced6a35be60264a81dc08804483917b41ea9", 108 | "sha256:dd9af8e86b3c811ce74f11a12f275c873bd38f40de6ce76b7ddc3664e113a98e", 109 | "sha256:dea916b28ee38c904ece3a26986b6943a073666c038ae6b6d6d131668da20f59", 110 | "sha256:e051969dc712d7050d0f3d6c6c8ed063941a004381e84f072815350476118f81", 111 | "sha256:e361efe6c8a667fa221d42b7fa2beb7fada86e901a0f0e1e17c7c7927d66b2ff", 112 | "sha256:eece0bc316c2b050e8c3596320e124c8ccea2a7872e593193d30eecab7f0acf6", 113 | "sha256:f04c7a8d4e5b50a570681b990ff3be09bce5efbd91a521442c0ebfc36e0ce422", 114 | "sha256:f3f962a499f95b3a5e90de36ac396cdb59c0c46b8003fbfcc1e2d78d7edc14f8", 115 | "sha256:f50a395dc3c950974ac73b2476136785c6995f611a81e14d2a7c6aa59b342abf", 116 | "sha256:f576699ca59f3f76127d70210a0ba20e7def93ab1a7e3587d55dd4b770775788", 117 | "sha256:f7c1cfa9dac4f1363d9620384f9881a1ec968ff825be1e9b2ecdb4cb5375fbf2", 118 | "sha256:f8368f0d5131f47da60f7cea4a5932418ca0bcd12c22fcf700f36af93fdf2a6a", 119 | "sha256:fb4426cddefb48683068e94ed4748710507bbd3f0a4d71574535443c75a16e36", 120 | "sha256:fb5efacbb5dd568d44f4f31a4764a52eefb78288f0445da016652fe7143cdde3", 121 | "sha256:fcedc43012527edb4ca2b97a6c8176dd2384a006e47549d4e73143f7982deaff" 122 | ], 123 | "version": "==2.0" 124 | }, 125 | "redis": { 126 | "hashes": [ 127 | "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f", 128 | "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f" 129 | ], 130 | "index": "pypi", 131 | "version": "==5.0.1" 132 | }, 133 | "sortedcontainers": { 134 | "hashes": [ 135 | "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", 136 | "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0" 137 | ], 138 | "version": "==2.4.0" 139 | } 140 | }, 141 | "develop": { 142 | "attrs": { 143 | "hashes": [ 144 | "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", 145 | "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" 146 | ], 147 | "markers": "python_version >= '3.7'", 148 | "version": "==23.1.0" 149 | }, 150 | "certifi": { 151 | "hashes": [ 152 | "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1", 153 | "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474" 154 | ], 155 | "markers": "python_version >= '3.6'", 156 | "version": "==2023.11.17" 157 | }, 158 | "charset-normalizer": { 159 | "hashes": [ 160 | "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", 161 | "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", 162 | "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", 163 | "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", 
164 | "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", 165 | "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", 166 | "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", 167 | "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", 168 | "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", 169 | "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", 170 | "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", 171 | "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", 172 | "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", 173 | "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", 174 | "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", 175 | "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", 176 | "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", 177 | "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", 178 | "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", 179 | "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", 180 | "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", 181 | "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", 182 | "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", 183 | "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", 184 | "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", 185 | "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", 186 | "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", 187 | "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", 188 | "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", 189 | "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", 190 | "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", 191 | "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", 192 | "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", 193 | "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", 194 | "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", 195 | "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", 196 | "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", 197 | "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", 198 | "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", 199 | "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", 200 | "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", 201 | "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", 202 | "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", 203 | "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", 204 | "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", 205 | "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", 206 | "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", 207 | 
"sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", 208 | "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", 209 | "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", 210 | "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", 211 | "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", 212 | "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", 213 | "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", 214 | "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", 215 | "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", 216 | "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", 217 | "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", 218 | "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", 219 | "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", 220 | "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", 221 | "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", 222 | "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", 223 | "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", 224 | "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", 225 | "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", 226 | "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", 227 | "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", 228 | "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", 229 | "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", 230 | "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", 231 | "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", 232 | "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", 233 | "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", 234 | "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", 235 | "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", 236 | "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", 237 | "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", 238 | "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", 239 | "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", 240 | "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", 241 | "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", 242 | "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", 243 | "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", 244 | "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", 245 | "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", 246 | "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", 247 | "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", 248 | "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", 249 | "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" 250 | ], 251 | "markers": "python_version >= '3.7'", 252 | "version": 
"==3.3.2" 253 | }, 254 | "coverage": { 255 | "extras": [ 256 | "toml" 257 | ], 258 | "hashes": [ 259 | "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79", 260 | "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a", 261 | "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f", 262 | "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a", 263 | "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa", 264 | "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398", 265 | "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba", 266 | "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d", 267 | "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf", 268 | "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b", 269 | "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518", 270 | "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d", 271 | "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795", 272 | "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2", 273 | "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e", 274 | "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32", 275 | "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745", 276 | "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b", 277 | "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e", 278 | "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d", 279 | "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f", 280 | "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660", 281 | "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62", 282 | "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6", 283 | "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04", 284 | "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c", 285 | "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5", 286 | "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef", 287 | "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc", 288 | "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae", 289 | "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578", 290 | "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466", 291 | "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4", 292 | "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91", 293 | "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0", 294 | "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4", 295 | "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b", 296 | "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe", 297 | "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b", 298 | "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75", 299 | "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b", 300 | "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c", 301 | 
"sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72", 302 | "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b", 303 | "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f", 304 | "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e", 305 | "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53", 306 | "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3", 307 | "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84", 308 | "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987" 309 | ], 310 | "index": "pypi", 311 | "markers": "python_version >= '3.7'", 312 | "version": "==6.5.0" 313 | }, 314 | "coveralls": { 315 | "hashes": [ 316 | "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea", 317 | "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026" 318 | ], 319 | "index": "pypi", 320 | "version": "==3.3.1" 321 | }, 322 | "docopt": { 323 | "hashes": [ 324 | "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" 325 | ], 326 | "version": "==0.6.2" 327 | }, 328 | "flake8": { 329 | "hashes": [ 330 | "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23", 331 | "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5" 332 | ], 333 | "index": "pypi", 334 | "version": "==6.1.0" 335 | }, 336 | "flake8-annotations": { 337 | "hashes": [ 338 | "sha256:af78e3216ad800d7e144745ece6df706c81b3255290cbf870e54879d495e8ade", 339 | "sha256:ff37375e71e3b83f2a5a04d443c41e2c407de557a884f3300a7fa32f3c41cb0a" 340 | ], 341 | "index": "pypi", 342 | "version": "==3.0.1" 343 | }, 344 | "flake8-bugbear": { 345 | "hashes": [ 346 | "sha256:90cf04b19ca02a682feb5aac67cae8de742af70538590509941ab10ae8351f71", 347 | "sha256:b182cf96ea8f7a8595b2f87321d7d9b28728f4d9c3318012d896543d19742cb5" 348 | ], 349 | "index": "pypi", 350 | "version": "==23.9.16" 351 | }, 352 | "flake8-docstrings": { 353 | "hashes": [ 354 | "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af", 355 | "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75" 356 | ], 357 | "index": "pypi", 358 | "version": "==1.7.0" 359 | }, 360 | "flake8-import-order": { 361 | "hashes": [ 362 | "sha256:82ed59f1083b629b030ee9d3928d9e06b6213eb196fe745b3a7d4af2168130df", 363 | "sha256:e23941f892da3e0c09d711babbb0c73bc735242e9b216b726616758a920d900e" 364 | ], 365 | "index": "pypi", 366 | "version": "==0.18.2" 367 | }, 368 | "flake8-string-format": { 369 | "hashes": [ 370 | "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", 371 | "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af" 372 | ], 373 | "index": "pypi", 374 | "version": "==0.3.0" 375 | }, 376 | "flake8-tidy-imports": { 377 | "hashes": [ 378 | "sha256:b0387fb2ea200441bd142309e716fb7b8f4b0937bdf5f8b7c0c118a5f5e2b8ed", 379 | "sha256:bd6cf86465402d2b86903009b748d85a628e599e17b76e810c9857e3a2815173" 380 | ], 381 | "index": "pypi", 382 | "version": "==4.10.0" 383 | }, 384 | "flake8-todo": { 385 | "hashes": [ 386 | "sha256:6e4c5491ff838c06fe5a771b0e95ee15fc005ca57196011011280fc834a85915" 387 | ], 388 | "index": "pypi", 389 | "version": "==0.7" 390 | }, 391 | "idna": { 392 | "hashes": [ 393 | "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", 394 | "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" 395 | ], 396 | "markers": "python_version >= '3.5'", 397 | 
"version": "==3.6" 398 | }, 399 | "mccabe": { 400 | "hashes": [ 401 | "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", 402 | "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" 403 | ], 404 | "markers": "python_version >= '3.6'", 405 | "version": "==0.7.0" 406 | }, 407 | "pep8-naming": { 408 | "hashes": [ 409 | "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971", 410 | "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80" 411 | ], 412 | "index": "pypi", 413 | "version": "==0.13.3" 414 | }, 415 | "pycodestyle": { 416 | "hashes": [ 417 | "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f", 418 | "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67" 419 | ], 420 | "markers": "python_version >= '3.8'", 421 | "version": "==2.11.1" 422 | }, 423 | "pydocstyle": { 424 | "hashes": [ 425 | "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019", 426 | "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1" 427 | ], 428 | "markers": "python_version >= '3.6'", 429 | "version": "==6.3.0" 430 | }, 431 | "pyflakes": { 432 | "hashes": [ 433 | "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774", 434 | "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc" 435 | ], 436 | "markers": "python_version >= '3.8'", 437 | "version": "==3.1.0" 438 | }, 439 | "python-dateutil": { 440 | "hashes": [ 441 | "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", 442 | "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" 443 | ], 444 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 445 | "version": "==2.8.2" 446 | }, 447 | "requests": { 448 | "hashes": [ 449 | "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", 450 | "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" 451 | ], 452 | "markers": "python_version >= '3.7'", 453 | "version": "==2.31.0" 454 | }, 455 | "setuptools": { 456 | "hashes": [ 457 | "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2", 458 | "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6" 459 | ], 460 | "markers": "python_version >= '3.8'", 461 | "version": "==69.0.2" 462 | }, 463 | "six": { 464 | "hashes": [ 465 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", 466 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" 467 | ], 468 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 469 | "version": "==1.16.0" 470 | }, 471 | "snowballstemmer": { 472 | "hashes": [ 473 | "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", 474 | "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a" 475 | ], 476 | "version": "==2.2.0" 477 | }, 478 | "time-machine": { 479 | "hashes": [ 480 | "sha256:02b33a8c19768c94f7ffd6aa6f9f64818e88afce23250016b28583929d20fb12", 481 | "sha256:0c9829b2edfcf6b5d72a6ff330d4380f36a937088314c675531b43d3423dd8af", 482 | "sha256:0cc116056a8a2a917a4eec85661dfadd411e0d8faae604ef6a0e19fe5cd57ef1", 483 | "sha256:0db97f92be3efe0ac62fd3f933c91a78438cef13f283b6dfc2ee11123bfd7d8a", 484 | "sha256:12eed2e9171c85b703d75c985dab2ecad4fe7025b7d2f842596fce1576238ece", 485 | "sha256:1812e48c6c58707db9988445a219a908a710ea065b2cc808d9a50636291f27d4", 486 | 
"sha256:19a3b10161c91ca8e0fd79348665cca711fd2eac6ce336ff9e6b447783817f93", 487 | "sha256:1a22be4df364f49a507af4ac9ea38108a0105f39da3f9c60dce62d6c6ea4ccdc", 488 | "sha256:1ac8ff145c63cd0dcfd9590fe694b5269aacbc130298dc7209b095d101f8cdde", 489 | "sha256:20205422fcf2caf9a7488394587df86e5b54fdb315c1152094fbb63eec4e9304", 490 | "sha256:21bef5854d49b62e2c33848b5c3e8acf22a3b46af803ef6ff19529949cb7cf9f", 491 | "sha256:2bd4169b808745d219a69094b3cb86006938d45e7293249694e6b7366225a186", 492 | "sha256:2dc76ee55a7d915a55960a726ceaca7b9097f67e4b4e681ef89871bcf98f00be", 493 | "sha256:32b71e50b07f86916ac04bd1eefc2bd2c93706b81393748b08394509ee6585dc", 494 | "sha256:34dcdbbd25c1e124e17fe58050452960fd16a11f9d3476aaa87260e28ecca0fd", 495 | "sha256:3a7a0a49ce50d9c306c4343a7d6a3baa11092d4399a4af4355c615ccc321a9d3", 496 | "sha256:3c87856105dcb25b5bbff031d99f06ef4d1c8380d096222e1bc63b496b5258e6", 497 | "sha256:42ef5349135626ad6cd889a0a81400137e5c6928502b0817ea9e90bb10702000", 498 | "sha256:4ca20f85a973a4ca8b00cf466cd72c27ccc72372549b138fd48d7e70e5a190ab", 499 | "sha256:4e3a2611f8788608ebbcb060a5e36b45911bc3b8adc421b1dc29d2c81786ce4d", 500 | "sha256:4f2ae8d0e359b216b695f1e7e7256f208c390db0480601a439c5dd1e1e4e16ce", 501 | "sha256:5aee23cd046abf9caeddc982113e81ba9097a01f3972e9560f5ed64e3495f66d", 502 | "sha256:5c6245db573863b335d9ca64b3230f623caf0988594ae554c0c794e7f80e3e66", 503 | "sha256:5f87787d562e42bf1006a87eb689814105b98c4d5545874a281280d0f8b9a2d9", 504 | "sha256:5fe3fda5fa73fec74278912e438fce1612a79c36fd0cc323ea3dc2d5ce629f31", 505 | "sha256:62fd14a80b8b71726e07018628daaee0a2e00937625083f96f69ed6b8e3304c0", 506 | "sha256:66fb3877014dca0b9286b0f06fa74062357bd23f2d9d102d10e31e0f8fa9b324", 507 | "sha256:679cbf9b15bfde1654cf48124128d3fbe52f821fa158a98fcee5fe7e05db1917", 508 | "sha256:67fa45cd813821e4f5bec0ac0820869e8e37430b15509d3f5fad74ba34b53852", 509 | "sha256:685d98593f13649ad5e7ce3e58efe689feca1badcf618ba397d3ab877ee59326", 510 | "sha256:6c16d90a597a8c2d3ce22d6be2eb3e3f14786974c11b01886e51b3cf0d5edaf7", 511 | "sha256:71acbc1febbe87532c7355eca3308c073d6e502ee4ce272b5028967847c8e063", 512 | "sha256:7558622a62243be866a7e7c41da48eacd82c874b015ecf67d18ebf65ca3f7436", 513 | "sha256:7693704c0f2f6b9beed912ff609781edf5fcf5d63aff30c92be4093e09d94b8e", 514 | "sha256:88601de1da06c7cab3d5ed3d5c3801ef683366e769e829e96383fdab6ae2fe42", 515 | "sha256:8d526cdcaca06a496877cfe61cc6608df2c3a6fce210e076761964ebac7f77cc", 516 | "sha256:918f8389de29b4f41317d121f1150176fae2cdb5fa41f68b2aee0b9dc88df5c3", 517 | "sha256:924377d398b1c48e519ad86a71903f9f36117f69e68242c99fb762a2465f5ad2", 518 | "sha256:9f128db8997c3339f04f7f3946dd9bb2a83d15e0a40d35529774da1e9e501511", 519 | "sha256:9fad549521c4c13bdb1e889b2855a86ec835780d534ffd8f091c2647863243be", 520 | "sha256:a26bdf3462d5f12a4c1009fdbe54366c6ef22c7b6f6808705b51dedaaeba8296", 521 | "sha256:ab04cf4e56e1ee65bee2adaa26a04695e92eb1ed1ccc65fbdafd0d114399595a", 522 | "sha256:b0c8f24ae611a58782773af34dd356f1f26756272c04be2be7ea73b47e5da37d", 523 | "sha256:bdfe4a7f033e6783c3e9a7f8d8fc0b115367330762e00a03ff35fedf663994f3", 524 | "sha256:c23b2408e3adcedec84ea1131e238f0124a5bc0e491f60d1137ad7239b37c01a", 525 | "sha256:ccbce292380ebf63fb9a52e6b03d91677f6a003e0c11f77473efe3913a75f289", 526 | "sha256:cfef4ebfb4f055ce3ebc7b6c1c4d0dbfcffdca0e783ad8c6986c992915a57ed3", 527 | "sha256:d4a2d3db2c3b8e519d5ef436cd405abd33542a7b7761fb05ef5a5f782a8ce0b1", 528 | "sha256:dabb3b155819811b4602f7e9be936e2024e20dc99a90f103e36b45768badf9c3", 529 | 
"sha256:de01f33aa53da37530ad97dcd17e9affa25a8df4ab822506bb08101bab0c2673", 530 | "sha256:dec0ec2135a4e2a59623e40c31d6e8a8ae73305ade2634380e4263d815855750", 531 | "sha256:e433827eccd6700a34a2ab28fd9361ff6e4d4923f718d2d1dac6d1dcd9d54da6", 532 | "sha256:e58d82fe0e59d6e096ada3281d647a2e7420f7da5453b433b43880e1c2e8e0c5", 533 | "sha256:e9935aff447f5400a2665ab10ed2da972591713080e1befe1bb8954e7c0c7806", 534 | "sha256:e9a9d150e098be3daee5c9f10859ab1bd14a61abebaed86e6d71f7f18c05b9d7", 535 | "sha256:f5fa9610f7e73fff42806a2ed8b06d862aa59ce4d178a52181771d6939c3e237" 536 | ], 537 | "index": "pypi", 538 | "version": "==2.13.0" 539 | }, 540 | "tomli": { 541 | "hashes": [ 542 | "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", 543 | "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" 544 | ], 545 | "version": "==2.0.1" 546 | }, 547 | "urllib3": { 548 | "hashes": [ 549 | "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3", 550 | "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54" 551 | ], 552 | "markers": "python_version >= '3.8'", 553 | "version": "==2.1.0" 554 | } 555 | } 556 | } 557 | --------------------------------------------------------------------------------