├── aiocache ├── py.typed ├── backends │ ├── __init__.py │ ├── memory.py │ ├── memcached.py │ └── valkey.py ├── exceptions.py ├── serializers │ ├── __init__.py │ └── serializers.py ├── __init__.py ├── plugins.py └── lock.py ├── tests ├── __init__.py ├── ut │ ├── __init__.py │ ├── backends │ │ ├── __init__.py │ │ └── test_valkey.py │ ├── test_exceptions.py │ ├── conftest.py │ ├── test_plugins.py │ ├── test_lock.py │ └── test_serializers.py ├── acceptance │ ├── __init__.py │ ├── conftest.py │ ├── test_plugins.py │ ├── test_serializers.py │ ├── test_decorators.py │ ├── test_base.py │ └── test_lock.py ├── performance │ ├── __init__.py │ ├── conftest.py │ ├── test_concurrency.py │ ├── server.py │ └── test_footprint.py ├── conftest.py └── utils.py ├── .codacy.yml ├── pyproject.toml ├── docs ├── images │ ├── architecture.png │ └── set_operation_flow.png ├── readthedocs.yml ├── testing.rst ├── decorators.rst ├── v1_migration.rst ├── locking.rst ├── plugins.rst ├── serializers.rst ├── caches.rst ├── index.rst ├── Makefile └── conf.py ├── .coveragerc ├── docker-compose.yml ├── requirements.txt ├── examples ├── run_all.sh ├── testing.py ├── simple_valkey.py ├── cached_decorator.py ├── frameworks │ ├── tornado_example.py │ ├── sanic_example.py │ └── aiohttp_example.py ├── python_object.py ├── redlock.py ├── multicached_decorator.py ├── serializer_function.py ├── optimistic_lock.py ├── plugins.py ├── marshmallow_serializer_class.py └── serializer_class.py ├── .codecov.yml ├── .github ├── dependabot.yml └── workflows │ ├── auto-merge.yml │ ├── codeql.yml │ └── ci.yml ├── MANIFEST.in ├── requirements-dev.txt ├── .release_notes.tpl ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── setup.cfg ├── Makefile ├── CONTRIBUTING.rst ├── .flake8 ├── .mypy.ini ├── scripts └── make_release ├── setup.py ├── LICENSE ├── .gitignore ├── README.rst └── .gitchangelog.rc /aiocache/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/ut/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/acceptance/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /aiocache/backends/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/performance/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/ut/backends/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.codacy.yml: -------------------------------------------------------------------------------- 1 | exclude_paths: 2 | - tests/** 3 | - examples/** 4 | -------------------------------------------------------------------------------- /aiocache/exceptions.py: 
-------------------------------------------------------------------------------- 1 | class InvalidCacheType(Exception): 2 | pass 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 99 3 | target-version = ['py38', 'py39', 'py310', 'py311'] 4 | -------------------------------------------------------------------------------- /docs/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/HEAD/docs/images/architecture.png -------------------------------------------------------------------------------- /docs/images/set_operation_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/HEAD/docs/images/set_operation_flow.png -------------------------------------------------------------------------------- /tests/ut/test_exceptions.py: -------------------------------------------------------------------------------- 1 | from aiocache.exceptions import InvalidCacheType 2 | 3 | 4 | def test_inherit_from_exception(): 5 | assert isinstance(InvalidCacheType(), Exception) 6 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = aiocache, tests 4 | 5 | [report] 6 | exclude_also = 7 | if TYPE_CHECKING 8 | assert False 9 | : \.\.\.(\s*#.*)?$ 10 | ^ +\.\.\.$ 11 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | valkey: 4 | image: valkey/valkey 5 | ports: 6 | - "6379:6379" 7 | memcached: 8 | image: memcached 9 | ports: 10 | - "11211:11211" 11 | -------------------------------------------------------------------------------- /docs/readthedocs.yml: -------------------------------------------------------------------------------- 1 | formats: 2 | - none 3 | 4 | build: 5 | image: latest 6 | 7 | python: 8 | version: 3.11 9 | pip_install: true 10 | extra_requirements: 11 | - redis 12 | - memcached 13 | - msgpack 14 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | -e . 2 | 3 | aiomcache==0.8.2 4 | aiohttp==3.13.2 5 | marshmallow==3.26.1 6 | msgpack==1.1.2 7 | pytest==8.4.2 8 | pytest-asyncio==1.2.0 9 | pytest-cov==7.0.0 10 | pytest-mock==3.15.1 11 | valkey-glide==2.0.1 12 | -------------------------------------------------------------------------------- /examples/run_all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | pushd "$(dirname "$0")" 4 | 5 | for f in `find . 
-name '*.py' -not -path "./frameworks/*"`; do 6 | echo "########## Running $f #########" 7 | python $f || exit 1 8 | echo;echo;echo 9 | done 10 | 11 | popd 12 | -------------------------------------------------------------------------------- /.codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | notify: 3 | after_n_builds: 4 4 | 5 | coverage: 6 | status: 7 | patch: no 8 | changes: no 9 | 10 | comment: 11 | layout: "reach, diff, flags, files, footer" 12 | behavior: default 13 | require_changes: yes 14 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: "monthly" 12 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture 5 | def valkey_config(): 6 | from glide import GlideClientConfiguration, NodeAddress 7 | 8 | addresses = [NodeAddress("localhost", 6379)] 9 | return GlideClientConfiguration(addresses=addresses, database_id=0) 10 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CHANGES.rst 2 | include LICENSE 3 | include README.rst 4 | include Makefile 5 | include requirements.txt 6 | include requirements-dev.txt 7 | include setup.cfg 8 | include .coveragerc 9 | graft aiocache 10 | graft docs 11 | graft examples 12 | graft tests 13 | global-exclude *.pyc 14 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | 3 | flake8==7.3.0 4 | flake8-bandit==4.1.1 5 | flake8-bugbear==24.12.12 6 | flake8-import-order==0.19.2 7 | flake8-requirements==2.3.0 8 | mypy==1.19.0; implementation_name=="cpython" 9 | types-ujson==5.10.0.20250822 10 | aiocache-dynamodb==1.0.2 # used for documentation 11 | -------------------------------------------------------------------------------- /.release_notes.tpl: -------------------------------------------------------------------------------- 1 | {{#general_title}} 2 | # {{{title}}} 3 | 4 | 5 | {{/general_title}} 6 | {{#versions}} 7 | ## {{{label}}} 8 | 9 | {{#sections}} 10 | #### {{{label}}} 11 | 12 | {{#commits}} 13 | * {{{subject}}} - {{{author}}} 14 | {{#body}} 15 | _{{{body}}}_ 16 | {{/body}} 17 | 18 | {{/commits}} 19 | {{/sections}} 20 | 21 | 22 | {{/versions}} 23 | -------------------------------------------------------------------------------- /docs/testing.rst: -------------------------------------------------------------------------------- 1 | Testing 2 | ======= 3 | 4 | It's really easy to mock out aiocache functionality in your tests: 5 | 6 | .. literalinclude:: ../examples/testing.py 7 | 8 | Note that we are passing the :ref:`basecache` as the spec for the Mock. 9 | 10 | Also, for debugging purposes you can run ``AIOCACHE_DISABLE=1 python myscript.py`` to disable caching.
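 For example, a minimal pytest version of the same idea (a sketch, assuming pytest and pytest-asyncio as pinned in ``requirements.txt``; ``create_autospec`` turns the async methods of :ref:`basecache` into ``AsyncMock`` objects automatically)::

    from unittest.mock import create_autospec

    from aiocache.base import BaseCache


    async def test_get_returns_mocked_value():
        # The autospec enforces the real BaseCache method signatures.
        mocked_cache = create_autospec(BaseCache, instance=True)
        mocked_cache.get.return_value = "world"

        assert await mocked_cache.get("hello") == "world"
        mocked_cache.get.assert_awaited_once_with("hello")

 No ``@pytest.mark.asyncio`` marker is needed here because this repository sets ``asyncio_mode = auto`` in ``setup.cfg``.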
11 | -------------------------------------------------------------------------------- /examples/testing.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from unittest.mock import MagicMock 3 | 4 | from aiocache.base import BaseCache 5 | 6 | 7 | async def main(): 8 | mocked_cache = MagicMock(spec=BaseCache) 9 | mocked_cache.get.return_value = "world" 10 | print(await mocked_cache.get("hello")) 11 | 12 | 13 | if __name__ == "__main__": 14 | asyncio.run(main()) 15 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v5.0.0 6 | hooks: 7 | - id: trailing-whitespace 8 | - id: end-of-file-fixer 9 | - id: check-yaml 10 | - id: check-added-large-files 11 | - repo: https://github.com/PyCQA/flake8 12 | rev: '7.1.1' 13 | hooks: 14 | - id: flake8 15 | exclude: "^docs/" 16 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html 3 | # for details 4 | 5 | --- 6 | version: 2 7 | 8 | sphinx: 9 | configuration: docs/conf.py 10 | 11 | submodules: 12 | include: all 13 | exclude: [] 14 | recursive: true 15 | 16 | build: 17 | os: ubuntu-24.04 18 | tools: 19 | python: "3.12" 20 | apt_packages: 21 | - graphviz 22 | 23 | jobs: 24 | post_create_environment: 25 | - pip install -r requirements-dev.txt 26 | 27 | ... 28 | -------------------------------------------------------------------------------- /tests/performance/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture 5 | async def valkey_cache(valkey_config): 6 | # valkey connection pool raises ConnectionError but doesn't wait for conn reuse 7 | # when exceeding max pool size. 
8 | from aiocache.backends.valkey import ValkeyCache 9 | 10 | async with ValkeyCache(valkey_config, namespace="test") as cache: 11 | yield cache 12 | 13 | 14 | @pytest.fixture 15 | async def memcached_cache(): 16 | from aiocache.backends.memcached import MemcachedCache 17 | 18 | async with MemcachedCache(namespace="test", pool_size=1) as cache: 19 | yield cache 20 | -------------------------------------------------------------------------------- /aiocache/serializers/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from .serializers import ( 4 | BaseSerializer, 5 | JsonSerializer, 6 | NullSerializer, 7 | PickleSerializer, 8 | StringSerializer, 9 | ) 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | try: 15 | import msgpack 16 | except ImportError: 17 | logger.debug("msgpack not installed, MsgPackSerializer unavailable") 18 | else: 19 | from .serializers import MsgPackSerializer 20 | 21 | del msgpack 22 | 23 | 24 | __all__ = [ 25 | "BaseSerializer", 26 | "NullSerializer", 27 | "StringSerializer", 28 | "PickleSerializer", 29 | "JsonSerializer", 30 | "MsgPackSerializer", 31 | ] 32 | -------------------------------------------------------------------------------- /docs/decorators.rst: -------------------------------------------------------------------------------- 1 | .. _decorators: 2 | 3 | Decorators 4 | ========== 5 | 6 | aiocache comes with a couple of decorators for caching results from asynchronous functions. Do not use these decorators on synchronous functions; doing so may lead to unexpected behavior. 7 | 8 | .. _cached: 9 | 10 | cached 11 | ------ 12 | 13 | .. automodule:: aiocache 14 | :members: cached 15 | 16 | .. literalinclude:: ../examples/cached_decorator.py 17 | :language: python 18 | :linenos: 19 | 20 | .. _multi_cached: 21 | 22 | multi_cached 23 | ------------ 24 | 25 | .. automodule:: aiocache 26 | :members: multi_cached 27 | 28 | ..
literalinclude:: ../examples/multicached_decorator.py 29 | :language: python 30 | :linenos: 31 | -------------------------------------------------------------------------------- /.github/workflows/auto-merge.yml: -------------------------------------------------------------------------------- 1 | name: Dependabot auto-merge 2 | on: pull_request_target 3 | 4 | permissions: 5 | pull-requests: write 6 | contents: write 7 | 8 | jobs: 9 | dependabot: 10 | runs-on: ubuntu-latest 11 | if: ${{ github.actor == 'dependabot[bot]' }} 12 | steps: 13 | - name: Dependabot metadata 14 | id: metadata 15 | uses: dependabot/fetch-metadata@v2.4.0 16 | with: 17 | github-token: "${{ secrets.GITHUB_TOKEN }}" 18 | - name: Enable auto-merge for Dependabot PRs 19 | run: gh pr merge --auto --squash "$PR_URL" 20 | env: 21 | PR_URL: ${{github.event.pull_request.html_url}} 22 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 23 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 3 | 4 | [pep8] 5 | max-line-length=100 6 | 7 | [tool:pytest] 8 | addopts = --cov=aiocache --cov=tests/ --cov-report term --strict-markers 9 | asyncio_mode = auto 10 | junit_suite_name = aiocache_test_suite 11 | filterwarnings= 12 | error 13 | # Can be removed once using aiojobs or similar in decorator() 14 | ignore:never awaited 15 | testpaths = tests/ 16 | junit_family=xunit2 17 | xfail_strict = true 18 | markers = 19 | memcached: tests requiring memcached backend 20 | valkey: tests requiring valkey backend 21 | 22 | [coverage:run] 23 | branch = True 24 | parallel = True 25 | source = aiocache 26 | 27 | [coverage:report] 28 | show_missing = true 29 | skip_covered = true 30 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | cov-report = true 2 | 3 | 4 | lint: 5 | flake8 tests/ aiocache/ 6 | 7 | install-dev: 8 | pip install -e .[valkey,memcached,msgpack,dev] 9 | 10 | pylint: 11 | pylint --disable=C0111 aiocache 12 | 13 | unit: 14 | coverage run -m pytest tests/ut 15 | @if [ $(cov-report) = true ]; then\ 16 | coverage combine;\ 17 | coverage report;\ 18 | fi 19 | 20 | acceptance: 21 | pytest -sv tests/acceptance 22 | 23 | doc: 24 | make -C docs/ html 25 | 26 | functional: 27 | bash examples/run_all.sh 28 | 29 | performance: 30 | pytest -sv tests/performance 31 | 32 | test: lint unit acceptance functional 33 | 34 | _release: 35 | scripts/make_release 36 | 37 | release: test _release 38 | 39 | changelog: 40 | gitchangelog 41 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | #. Clone the repository with ``git clone git@github.com:aio-libs/aiocache.git`` 5 | #. Install dependencies with ``make install-dev`` 6 | #. Make a change (this means writing code, tests that don't reduce coverage, and docs) 7 | #. Ensure syntax is correct with ``make lint`` and fix any errors it reports 8 | #. Ensure all tests pass with ``make test``. For fast iterations, use ``make unit``, which runs just the unit tests. You will need docker and docker-compose to pass the acceptance and functional tests. 9 | #. Ensure documentation is OK with ``sphinx-autobuild docs/ docs/_build/html/`` 10 | #. 
Make the PR on GitHub (you must have your own fork) 11 | -------------------------------------------------------------------------------- /docs/v1_migration.rst: -------------------------------------------------------------------------------- 1 | .. _v1_migration: 2 | 3 | Migrating from v0.x to v1 4 | ========================= 5 | 6 | The v1 release of aiocache is a major release that introduces several breaking changes. 7 | 8 | Changes to Cache Instantiation 9 | ------------------------------ 10 | 11 | The abstraction and factories around cache instantiation have been removed in favor of a more direct approach. 12 | 13 | * The `aiocache.Cache` class has been removed. Instead, use the specific cache class directly. For example, use `aiocache.ValkeyCache` instead of `aiocache.Cache.REDIS`. 14 | * Caches should be fully instantiated when passed to decorators, rather than being instantiated with a factory function. 15 | * Cache aliases have been removed. Create an instance of the cache class directly instead. 16 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | enable-extensions = G 3 | max-doc-length = 90 4 | max-line-length = 90 5 | select = A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,B901,B902,B903,B950 6 | # E226: Missing whitespace around arithmetic operators can help group things together. 7 | # E501,W505: Superseded by B950 (from Bugbear) 8 | # E722: Superseded by B001 (from Bugbear) 9 | # W503: Mutually exclusive with W504. 10 | ignore = E226,E501,E722,W503,W505 11 | per-file-ignores = 12 | # S*: Bandit security checks not useful in tests. 13 | tests/*:S 14 | 15 | # flake8-import-order 16 | application-import-names = aiocache 17 | import-order-style = pycharm 18 | 19 | # flake8-quotes 20 | inline-quotes = " 21 | # flake8-requirements 22 | known-modules = valkey-glide:[glide] 23 | requirements-file = requirements-dev.txt 24 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | files = aiocache, examples, tests 3 | #check_untyped_defs = True 4 | follow_imports_for_stubs = True 5 | #disallow_any_decorated = True 6 | disallow_any_generics = True 7 | disallow_incomplete_defs = True 8 | disallow_subclassing_any = True 9 | #disallow_untyped_calls = True 10 | disallow_untyped_decorators = True 11 | #disallow_untyped_defs = True 12 | implicit_reexport = False 13 | no_implicit_optional = True 14 | show_error_codes = True 15 | strict_equality = True 16 | warn_incomplete_stub = True 17 | warn_redundant_casts = True 18 | warn_unreachable = True 19 | warn_unused_ignores = True 20 | disallow_any_unimported = True 21 | #warn_return_any = True 22 | 23 | [mypy-tests.*] 24 | disallow_any_decorated = False 25 | disallow_untyped_calls = False 26 | disallow_untyped_defs = False 27 | 28 | 29 | [mypy-msgpack.*] 30 | ignore_missing_imports = True 31 | -------------------------------------------------------------------------------- /examples/simple_valkey.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from glide import GlideClientConfiguration, NodeAddress 4 | 5 | from aiocache import ValkeyCache 6 | 7 | addresses = [NodeAddress("localhost", 6379)] 8 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 9 | 10 | 11 | async def valkey(cache): 12 | await cache.set("key", "value") 13
| await cache.set("expire_me", "value", ttl=10) 14 | 15 | assert await cache.get("key") == "value" 16 | assert await cache.get("expire_me") == "value" 17 | assert await cache.raw("ttl", "main:expire_me") > 0 18 | 19 | 20 | async def test_valkey(): 21 | async with ValkeyCache(config, namespace="main") as cache: 22 | await valkey(cache) 23 | await cache.delete("key") 24 | await cache.delete("expire_me") 25 | await cache.close() 26 | 27 | 28 | if __name__ == "__main__": 29 | asyncio.run(test_valkey()) 30 | -------------------------------------------------------------------------------- /docs/locking.rst: -------------------------------------------------------------------------------- 1 | .. _locking: 2 | 3 | .. WARNING:: 4 | This was added in version 0.7.0 and the API is new. This means its open to breaking changes in future versions until the API is considered stable. 5 | 6 | 7 | Locking 8 | ======= 9 | 10 | 11 | .. WARNING:: 12 | The implementations provided are **NOT** intented for consistency/synchronization purposes. If you need a locking mechanism focused on consistency, consider implementing your mechanism based on more serious tools like https://zookeeper.apache.org/. 13 | 14 | 15 | There are a couple of locking implementations than can help you to protect against different scenarios: 16 | 17 | 18 | .. _redlock: 19 | 20 | RedLock 21 | ------- 22 | 23 | .. autoclass:: aiocache.lock.RedLock 24 | :members: 25 | 26 | 27 | .. _optimisticlock: 28 | 29 | OptimisticLock 30 | -------------- 31 | 32 | .. autoclass:: aiocache.lock.OptimisticLock 33 | :members: 34 | -------------------------------------------------------------------------------- /examples/cached_decorator.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from collections import namedtuple 3 | 4 | from glide import GlideClientConfiguration, NodeAddress 5 | 6 | from aiocache import cached 7 | from aiocache import ValkeyCache 8 | from aiocache.serializers import PickleSerializer 9 | 10 | Result = namedtuple("Result", "content, status") 11 | 12 | addresses = [NodeAddress("localhost", 6379)] 13 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 14 | cache = ValkeyCache(config=config, namespace="main", serializer=PickleSerializer()) 15 | 16 | 17 | @cached(cache, ttl=10, key_builder=lambda *args, **kw: "key") 18 | async def cached_call(): 19 | return Result("content", 200) 20 | 21 | 22 | async def test_cached(): 23 | async with cache: 24 | await cached_call() 25 | exists = await cache.exists("key") 26 | assert exists is True 27 | await cache.delete("key") 28 | 29 | 30 | if __name__ == "__main__": 31 | asyncio.run(test_cached()) 32 | -------------------------------------------------------------------------------- /examples/frameworks/tornado_example.py: -------------------------------------------------------------------------------- 1 | import tornado.web 2 | import tornado.ioloop 3 | from datetime import datetime 4 | from aiocache import cached, SimpleMemoryCache 5 | from aiocache.serializers import JsonSerializer 6 | 7 | 8 | class MainHandler(tornado.web.RequestHandler): 9 | 10 | # Due some incompatibilities between tornado and asyncio, caches can't use the "ttl" feature 11 | # in order to make it work, you will have to specify it always to 0 12 | @cached(SimpleMemoryCache(serializer=JsonSerializer, timeout=0), key_builder=lambda x: "my_custom_key") 13 | async def time(self): 14 | return {"time": datetime.now().isoformat()} 15 | 16 | async def get(self): 17 
| self.write(await self.time()) 18 | 19 | 20 | if __name__ == "__main__": 21 | tornado.ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop') 22 | app = tornado.web.Application([(r"/", MainHandler)]) 23 | app.listen(8888) 24 | tornado.ioloop.IOLoop.current().start() 25 | -------------------------------------------------------------------------------- /examples/python_object.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from collections import namedtuple 3 | 4 | from glide import GlideClientConfiguration, NodeAddress 5 | 6 | from aiocache import ValkeyCache 7 | from aiocache.serializers import PickleSerializer 8 | 9 | MyObject = namedtuple("MyObject", ["x", "y"]) 10 | addresses = [NodeAddress("localhost", 6379)] 11 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 12 | 13 | 14 | async def complex_object(cache): 15 | obj = MyObject(x=1, y=2) 16 | await cache.set("key", obj) 17 | my_object = await cache.get("key") 18 | 19 | assert my_object.x == 1 20 | assert my_object.y == 2 21 | 22 | 23 | async def test_python_object(): 24 | async with ValkeyCache( 25 | config, namespace="main", serializer=PickleSerializer() 26 | ) as cache: 27 | await complex_object(cache) 28 | await cache.delete("key") 29 | await cache.close() 30 | 31 | 32 | if __name__ == "__main__": 33 | asyncio.run(test_python_object()) 34 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ "master" ] 6 | pull_request: 7 | branches: [ "master" ] 8 | schedule: 9 | - cron: "28 18 * * 3" 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ python ] 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v5 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v4 31 | with: 32 | languages: ${{ matrix.language }} 33 | queries: +security-and-quality 34 | 35 | - name: Autobuild 36 | uses: github/codeql-action/autobuild@v4 37 | 38 | - name: Perform CodeQL Analysis 39 | uses: github/codeql-action/analyze@v4 40 | with: 41 | category: "/language:${{ matrix.language }}" 42 | -------------------------------------------------------------------------------- /scripts/make_release: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SCRIPT=$(readlink -f "$0") 4 | pushd $(dirname "$SCRIPT") 5 | cd .. 6 | 7 | version=$(grep -o -E "([0-9]+\.[0-9]+\.[0-9]+)" aiocache/_version.py) 8 | echo -n "New version number (current is $version): " 9 | read new_version 10 | gitchangelog ^$version HEAD | sed "s/Unreleased/$new_version (`date +%Y-%m-%d`)/g" > _release_notes 11 | cat _release_notes 12 | echo -n "Are you happy with the release notes (if not, modify the ./_release_notes file manually)? (y/n) " 13 | read answer 14 | 15 | if echo "$answer" | grep -iq "^y" ;then 16 | echo "Generating new release..." 
17 | sed -i "s/$version/$new_version/" aiocache/_version.py 18 | sed -i '1s/^/# Changelog\n\n\n/' _release_notes && sed -i '1,3d' CHANGELOG.md && cat CHANGELOG.md >> _release_notes && mv _release_notes CHANGELOG.md 19 | git add CHANGELOG.md aiocache/_version.py 20 | git commit -m "Bump version $new_version" 21 | git tag -a "$new_version" -m "$new_version" 22 | git push --follow-tags 23 | 24 | else 25 | exit 1 26 | fi 27 | 28 | popd 29 | -------------------------------------------------------------------------------- /aiocache/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Any, Type 3 | 4 | from .backends.memory import SimpleMemoryCache 5 | from .base import BaseCache 6 | 7 | __version__ = "1.0.0a0" 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | _AIOCACHE_CACHES: list[Type[BaseCache[Any]]] = [SimpleMemoryCache] 12 | 13 | try: 14 | import glide 15 | except ImportError: 16 | logger.debug("glide not installed, ValkeyCache unavailable") 17 | else: 18 | from aiocache.backends.valkey import ValkeyCache 19 | 20 | _AIOCACHE_CACHES.append(ValkeyCache) 21 | del glide 22 | 23 | try: 24 | import aiomcache 25 | except ImportError: 26 | logger.debug("aiomcache not installed, Memcached unavailable") 27 | else: 28 | from aiocache.backends.memcached import MemcachedCache 29 | 30 | _AIOCACHE_CACHES.append(MemcachedCache) 31 | del aiomcache 32 | 33 | from .decorators import cached, cached_stampede, multi_cached # noqa: E402,I202 34 | 35 | __all__ = ( 36 | "cached", 37 | "cached_stampede", 38 | "multi_cached", 39 | *sorted(c.__name__ for c in _AIOCACHE_CACHES), 40 | ) 41 | -------------------------------------------------------------------------------- /examples/frameworks/sanic_example.py: -------------------------------------------------------------------------------- 1 | """ 2 | Example of caching using aiocache package: 3 | 4 | /: Does a 3 seconds sleep. 
Only the first time, because it's using the `cached` decorator 5 | /reuse: Returns the data stored by the "main" endpoint 6 | """ 7 | 8 | import asyncio 9 | 10 | from sanic import Sanic 11 | from sanic.response import json 12 | from sanic.log import logger 13 | from aiocache import cached, SimpleMemoryCache 14 | from aiocache.serializers import JsonSerializer 15 | 16 | app = Sanic(__name__) 17 | 18 | 19 | @cached(SimpleMemoryCache(), key_builder=lambda x: "my_custom_key") 20 | async def expensive_call(): 21 | logger.info("Expensive has been called") 22 | await asyncio.sleep(3) 23 | return {"test": True} 24 | 25 | 26 | async def reuse_data(): 27 | cache = SimpleMemoryCache(serializer=JsonSerializer()) # Not ideal to define here 28 | data = await cache.get("my_custom_key") # Note the key is defined in `cached` decorator 29 | return data 30 | 31 | 32 | @app.route("/") 33 | async def main(request): 34 | logger.info("Received GET /") 35 | return json(await expensive_call()) 36 | 37 | 38 | @app.route("/reuse") 39 | async def reuse(request): 40 | logger.info("Received GET /reuse") 41 | return json(await reuse_data()) 42 | 43 | 44 | app.run(host="0.0.0.0", port=8000) 45 | -------------------------------------------------------------------------------- /tests/acceptance/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from ..utils import KEY_LOCK, Keys 6 | 7 | 8 | @pytest.fixture 9 | async def valkey_cache(valkey_config): 10 | from aiocache.backends.valkey import ValkeyCache 11 | 12 | async with ValkeyCache(valkey_config, namespace="test") as cache: 13 | yield cache 14 | await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) 15 | 16 | 17 | @pytest.fixture 18 | async def memory_cache(): 19 | from aiocache.backends.memory import SimpleMemoryCache 20 | 21 | async with SimpleMemoryCache(namespace="test") as cache: 22 | yield cache 23 | await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) 24 | 25 | 26 | @pytest.fixture 27 | async def memcached_cache(): 28 | from aiocache.backends.memcached import MemcachedCache 29 | 30 | async with MemcachedCache(namespace="test") as cache: 31 | yield cache 32 | await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) 33 | 34 | 35 | @pytest.fixture( 36 | params=( 37 | pytest.param("valkey_cache", marks=pytest.mark.valkey), 38 | "memory_cache", 39 | pytest.param("memcached_cache", marks=pytest.mark.memcached), 40 | ) 41 | ) 42 | def cache(request): 43 | return request.getfixturevalue(request.param) 44 | -------------------------------------------------------------------------------- /examples/redlock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | from glide import GlideClientConfiguration, NodeAddress 5 | 6 | from aiocache import ValkeyCache 7 | from aiocache.lock import RedLock 8 | 9 | logger = logging.getLogger(__name__) 10 | addresses = [NodeAddress("localhost", 6379)] 11 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 12 | 13 | 14 | async def expensive_function(): 15 | logger.warning("Expensive is being executed...") 16 | await asyncio.sleep(1) 17 | return "result" 18 | 19 | 20 | async def my_view(cache): 21 | async with RedLock(cache, "key", lease=2): # Wait at most 2 seconds 22 | result = await cache.get("key") 23 | if result is not None: 24 | logger.info("Found the value in the cache hurray!") 25 | return result 26 | 27 | result =
await expensive_function() 28 | await cache.set("key", result) 29 | return result 30 | 31 | 32 | async def concurrent(cache): 33 | await asyncio.gather(my_view(cache), my_view(cache), my_view(cache)) 34 | 35 | 36 | async def test_redis(): 37 | async with ValkeyCache(config, namespace="main") as cache: 38 | await concurrent(cache) 39 | await cache.delete("key") 40 | await cache.close() 41 | 42 | 43 | if __name__ == "__main__": 44 | asyncio.run(test_redis()) 45 | -------------------------------------------------------------------------------- /examples/multicached_decorator.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from glide import GlideClientConfiguration, NodeAddress 4 | 5 | from aiocache import multi_cached 6 | from aiocache import ValkeyCache 7 | 8 | DICT = {"a": "Z", "b": "Y", "c": "X", "d": "W"} 9 | 10 | addresses = [NodeAddress("localhost", 6379)] 11 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 12 | cache = ValkeyCache(config=config, namespace="main") 13 | 14 | 15 | @multi_cached(cache, keys_from_attr="ids") 16 | async def multi_cached_ids(ids=None): 17 | return {id_: DICT[id_] for id_ in ids} 18 | 19 | 20 | @multi_cached(cache, keys_from_attr="keys") 21 | async def multi_cached_keys(keys=None): 22 | return {id_: DICT[id_] for id_ in keys} 23 | 24 | 25 | async def test_multi_cached(): 26 | async with cache: 27 | await multi_cached_ids(ids=("a", "b")) 28 | await multi_cached_ids(ids=("a", "c")) 29 | await multi_cached_keys(keys=("d",)) 30 | 31 | assert await cache.exists("a") 32 | assert await cache.exists("b") 33 | assert await cache.exists("c") 34 | assert await cache.exists("d") 35 | 36 | await cache.delete("a") 37 | await cache.delete("b") 38 | await cache.delete("c") 39 | await cache.delete("d") 40 | 41 | 42 | if __name__ == "__main__": 43 | asyncio.run(test_multi_cached()) 44 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | from pathlib import Path 3 | 4 | from setuptools import setup 5 | 6 | p = Path(__file__).with_name("aiocache") / "__init__.py" 7 | try: 8 | version = re.findall(r"^__version__ = \"([^']+)\"\r?$", p.read_text(), re.M)[0] 9 | except IndexError: 10 | raise RuntimeError("Unable to determine version.") 11 | 12 | readme = Path(__file__).with_name("README.rst").read_text() 13 | 14 | 15 | setup( 16 | name="aiocache", 17 | version=version, 18 | author="Manuel Miranda", 19 | url="https://github.com/aio-libs/aiocache", 20 | author_email="manu.mirandad@gmail.com", 21 | license="BSD-3-Clause", 22 | description="multi backend asyncio cache", 23 | long_description=readme, 24 | classifiers=[ 25 | "Programming Language :: Python", 26 | "Programming Language :: Python :: 3.9", 27 | "Programming Language :: Python :: 3.10", 28 | "Programming Language :: Python :: 3.11", 29 | "Programming Language :: Python :: 3.12", 30 | "Programming Language :: Python :: 3.13", 31 | "Framework :: AsyncIO", 32 | ], 33 | python_requires=">=3.9", 34 | packages=("aiocache",), 35 | install_requires=None, 36 | extras_require={ 37 | "valkey": ["valkey-glide>=2.0.0"], 38 | "memcached": ["aiomcache>=0.5.2"], 39 | "msgpack": ["msgpack>=0.5.5"], 40 | }, 41 | include_package_data=True, 42 | ) 43 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 
| Copyright (c) 2016, Manuel Miranda de Cid 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | 8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 10 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 11 | 12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 13 | -------------------------------------------------------------------------------- /examples/serializer_function.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | 4 | from glide import GlideClientConfiguration, NodeAddress 5 | 6 | from marshmallow import Schema, fields, post_load 7 | 8 | from aiocache import ValkeyCache 9 | 10 | 11 | addresses = [NodeAddress("localhost", 6379)] 12 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 13 | 14 | 15 | class MyType: 16 | def __init__(self, x, y): 17 | self.x = x 18 | self.y = y 19 | 20 | 21 | class MyTypeSchema(Schema): 22 | x = fields.Number() 23 | y = fields.Number() 24 | 25 | @post_load 26 | def build_object(self, data, **kwargs): 27 | return MyType(data["x"], data["y"]) 28 | 29 | 30 | def dumps(value): 31 | return MyTypeSchema().dumps(value) 32 | 33 | 34 | def loads(value): 35 | return MyTypeSchema().loads(value) 36 | 37 | 38 | async def serializer_function(cache): 39 | await cache.set("key", MyType(1, 2), dumps_fn=dumps) 40 | 41 | obj = await cache.get("key", loads_fn=loads) 42 | 43 | assert obj.x == 1 44 | assert obj.y == 2 45 | assert await cache.get("key") == json.loads(('{"y": 2.0, "x": 1.0}')) 46 | assert json.loads(await cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0} 47 | 48 | 49 | async def test_serializer_function(): 50 | async with ValkeyCache(config, namespace="main") as cache: 51 | await serializer_function(cache) 52 | await cache.delete("key") 53 | 54 | 55 | if __name__ == "__main__": 56 | asyncio.run(test_serializer_function()) 57 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | 
dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | docs/_build_html/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # IPython Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # dotenv 80 | .env 81 | 82 | # virtualenv 83 | venv/ 84 | ENV/ 85 | 86 | # Spyder project settings 87 | .spyderproject 88 | 89 | # Rope project settings 90 | .ropeproject 91 | 92 | dump.rdb 93 | _release_notes 94 | 95 | tags 96 | .mypy_cache/ 97 | .pytest_cache/ 98 | -------------------------------------------------------------------------------- /tests/performance/test_concurrency.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import re 3 | import subprocess 4 | import time 5 | from multiprocessing import Process, set_start_method 6 | 7 | import pytest 8 | 9 | from .server import run_server 10 | 11 | # Spawn is needed to avoid potential segfaults in forked processes. 12 | set_start_method("spawn") 13 | 14 | 15 | @pytest.fixture(params=("memcached", "memory", "valkey")) 16 | def server(request): 17 | p = Process(target=run_server, args=(request.param,)) 18 | p.start() 19 | time.sleep(2) 20 | yield 21 | p.terminate() 22 | p.join(timeout=15) 23 | 24 | 25 | @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Not working currently.") 26 | def test_concurrency_error_rates(server): 27 | """Test with Apache benchmark tool.""" 28 | 29 | total_requests = 1500 30 | # On some platforms, it's required to enlarge number of "open file descriptors" 31 | # with "ulimit -n number" before doing the benchmark. 32 | cmd = ("ab", "-n", str(total_requests), "-c", "500", "http://127.0.0.1:8080/") 33 | result = subprocess.run(cmd, capture_output=True, check=True, encoding="utf-8") 34 | 35 | m = re.search(r"Failed requests:\s+([0-9]+)", result.stdout) 36 | assert m, "Missing output from ab: " + result.stdout 37 | failed_requests = int(m.group(1)) 38 | 39 | m = re.search(r"Non-2xx responses:\s+([0-9]+)", result.stdout) 40 | non_200 = int(m.group(1)) if m else 0 41 | 42 | assert failed_requests / total_requests < 0.75, result.stdout 43 | assert non_200 / total_requests < 0.75, result.stdout 44 | -------------------------------------------------------------------------------- /docs/plugins.rst: -------------------------------------------------------------------------------- 1 | .. _plugins: 2 | 3 | Plugins 4 | ======= 5 | 6 | Plugins can be used to enrich the behavior of the cache. 
By default all caches are configured without any plugins, but you can add new ones in the constructor or after initializing the cache class:: 7 | 8 | >>> from aiocache import SimpleMemoryCache 9 | >>> from aiocache.plugins import HitMissRatioPlugin, TimingPlugin 10 | >>> cache = SimpleMemoryCache(plugins=[HitMissRatioPlugin()]) 11 | >>> cache.plugins += [TimingPlugin()] 12 | 13 | You can define your custom plugin by inheriting from `BasePlugin`_ and overriding the needed methods (the overrides NEED to be async). All commands have ``pre_`` and ``post_`` hooks. 14 | 15 | .. WARNING:: 16 | Both pre and post hooks are executed awaiting the coroutine. If you perform expensive operations in the hooks, you will add latency to the command being executed, making a timeout error more likely. If a timeout error is raised, be aware that previous actions **won't be rolled back**. 17 | 18 | A complete example of using plugins: 19 | 20 | .. literalinclude:: ../examples/plugins.py 21 | :language: python 22 | :linenos: 23 | 24 | 25 | .. _baseplugin: 26 | 27 | BasePlugin 28 | ---------- 29 | 30 | .. autoclass:: aiocache.plugins.BasePlugin 31 | :members: 32 | :undoc-members: 33 | 34 | .. _timingplugin: 35 | 36 | TimingPlugin 37 | ------------ 38 | 39 | .. autoclass:: aiocache.plugins.TimingPlugin 40 | :members: 41 | :undoc-members: 42 | 43 | .. _hitmissratioplugin: 44 | 45 | HitMissRatioPlugin 46 | ------------------ 47 | 48 | .. autoclass:: aiocache.plugins.HitMissRatioPlugin 49 | :members: 50 | :undoc-members: 51 | -------------------------------------------------------------------------------- /examples/optimistic_lock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import random 4 | 5 | from glide import GlideClientConfiguration, NodeAddress 6 | 7 | from aiocache import ValkeyCache 8 | from aiocache.lock import OptimisticLock, OptimisticLockError 9 | 10 | logger = logging.getLogger(__name__) 11 | addresses = [NodeAddress("localhost", 6379)] 12 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 13 | 14 | 15 | async def expensive_function(): 16 | logger.warning("Expensive is being executed...") 17 | await asyncio.sleep(random.uniform(0, 2)) 18 | return "result" 19 | 20 | 21 | async def my_view(cache): 22 | async with OptimisticLock(cache, "key") as lock: 23 | result = await expensive_function() 24 | try: 25 | await lock.cas(result) 26 | except OptimisticLockError: 27 | logger.warning( 28 | "I failed setting the value because it is different since the lock started!"
29 | ) 30 | return result 31 | 32 | 33 | async def concurrent(cache): 34 | await cache.set("key", "initial_value") 35 | # All three calls will read 'initial_value' as the value to check and only 36 | # the first one finishing will succeed because the others, when trying to set 37 | # the value, will see that the value is not the same as when the lock started 38 | await asyncio.gather(my_view(cache), my_view(cache), my_view(cache)) 39 | 40 | 41 | async def test_redis(): 42 | async with ValkeyCache(config, namespace="main") as cache: 43 | await concurrent(cache) 44 | await cache.delete("key") 45 | 46 | 47 | if __name__ == "__main__": 48 | asyncio.run(test_redis()) 49 | -------------------------------------------------------------------------------- /examples/plugins.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | import logging 4 | 5 | from aiocache import SimpleMemoryCache 6 | from aiocache.plugins import HitMissRatioPlugin, TimingPlugin, BasePlugin 7 | 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | class MyCustomPlugin(BasePlugin): 13 | 14 | async def pre_set(self, *args, **kwargs): 15 | logger.info("I'm the pre_set hook being called with %s %s" % (args, kwargs)) 16 | 17 | async def post_set(self, *args, **kwargs): 18 | logger.info("I'm the post_set hook being called with %s %s" % (args, kwargs)) 19 | 20 | 21 | cache = SimpleMemoryCache( 22 | plugins=[HitMissRatioPlugin(), TimingPlugin(), MyCustomPlugin()], 23 | namespace="main") 24 | 25 | 26 | async def run(): 27 | await cache.set("a", "1") 28 | await cache.set("b", "2") 29 | await cache.set("c", "3") 30 | await cache.set("d", "4") 31 | 32 | possible_keys = ["a", "b", "c", "d", "e", "f"] 33 | 34 | for t in range(1000): 35 | await cache.get(random.choice(possible_keys)) 36 | 37 | assert cache.hit_miss_ratio["hit_ratio"] > 0.5 38 | assert cache.hit_miss_ratio["total"] == 1000 39 | 40 | assert cache.profiling["get_min"] > 0 41 | assert cache.profiling["set_min"] > 0 42 | assert cache.profiling["get_max"] > 0 43 | assert cache.profiling["set_max"] > 0 44 | 45 | print(cache.hit_miss_ratio) 46 | print(cache.profiling) 47 | 48 | 49 | async def test_run(): 50 | await run() 51 | await cache.delete("a") 52 | await cache.delete("b") 53 | await cache.delete("c") 54 | await cache.delete("d") 55 | 56 | 57 | if __name__ == "__main__": 58 | asyncio.run(test_run()) 59 | -------------------------------------------------------------------------------- /tests/ut/conftest.py: -------------------------------------------------------------------------------- 1 | from contextlib import ExitStack 2 | from unittest.mock import create_autospec, patch 3 | 4 | import pytest 5 | 6 | from aiocache.plugins import BasePlugin 7 | from ..utils import AbstractBaseCache, ConcreteBaseCache 8 | 9 | 10 | @pytest.fixture 11 | def mock_cache(mocker): 12 | return create_autospec(ConcreteBaseCache()) 13 | 14 | 15 | @pytest.fixture 16 | def mock_base_cache(): 17 | """Return BaseCache instance with unimplemented methods mocked out.""" 18 | plugin = create_autospec(BasePlugin, instance=True) 19 | cache = ConcreteBaseCache(timeout=0.002, plugins=(plugin,)) 20 | methods = ("_add", "_get", "_gets", "_set", "_multi_get", "_multi_set", "_delete", 21 | "_exists", "_increment", "_expire", "_clear", "_raw", "_close", 22 | "_redlock_release", "acquire_conn", "release_conn") 23 | with ExitStack() as stack: 24 | for f in methods: 25 | stack.enter_context(patch.object(cache, f, autospec=True)) 26 | 
stack.enter_context(patch.object(cache, "_serializer", autospec=True)) 27 | stack.enter_context(patch.object(cache, "build_key", cache._str_build_key)) 28 | yield cache 29 | 30 | 31 | @pytest.fixture 32 | def abstract_base_cache(): 33 | return AbstractBaseCache() 34 | 35 | 36 | @pytest.fixture 37 | def base_cache(): 38 | cache = ConcreteBaseCache() 39 | return cache 40 | 41 | 42 | @pytest.fixture 43 | async def valkey_cache(valkey_config): 44 | from aiocache.backends.valkey import ValkeyCache 45 | 46 | async with ValkeyCache(valkey_config) as cache: 47 | yield cache 48 | 49 | 50 | @pytest.fixture 51 | async def memcached_cache(): 52 | from aiocache.backends.memcached import MemcachedCache 53 | 54 | async with MemcachedCache() as cache: 55 | yield cache 56 | -------------------------------------------------------------------------------- /examples/frameworks/aiohttp_example.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from collections.abc import AsyncIterator 4 | from datetime import datetime 5 | from aiohttp import web 6 | from aiocache import cached, SimpleMemoryCache 7 | from aiocache.serializers import JsonSerializer 8 | 9 | cache = SimpleMemoryCache(serializer=JsonSerializer()) 10 | 11 | 12 | async def cache_context(app: web.Application) -> AsyncIterator[None]: 13 | async with cache: 14 | yield 15 | 16 | 17 | @cached(cache, key_builder=lambda x: "time") 18 | async def time(): 19 | return {"time": datetime.now().isoformat()} 20 | 21 | 22 | async def handle(request): 23 | return web.json_response(await time()) 24 | 25 | 26 | # It is also possible to cache the whole route, but for this you will need to 27 | # override `cached.get_from_cache` and regenerate the response since aiohttp 28 | # forbids reusing responses 29 | class CachedOverride(cached): 30 | def __init__(self, *args, **kwargs): 31 | super().__init__(*args, **kwargs) 32 | 33 | async def get_from_cache(self, key): 34 | try: 35 | value = await self.cache.get(key) 36 | if type(value) is web.Response: 37 | return web.Response( 38 | body=value.body, 39 | status=value.status, 40 | reason=value.reason, 41 | headers=value.headers, 42 | ) 43 | return value 44 | except Exception: 45 | logging.exception("Couldn't retrieve %s, unexpected error", key) 46 | return None 47 | 48 | 49 | @CachedOverride(cache, key_builder="route") 50 | async def handle2(request): 51 | return web.json_response(await asyncio.sleep(3)) 52 | 53 | 54 | if __name__ == "__main__": 55 | app = web.Application() 56 | app.router.add_get('/handle', handle) 57 | app.router.add_get('/handle2', handle2) 58 | 59 | app.cleanup_ctx.append(cache_context) 60 | 61 | web.run_app(app) 62 | -------------------------------------------------------------------------------- /examples/marshmallow_serializer_class.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | import asyncio 4 | from typing import Any 5 | 6 | from marshmallow import fields, Schema, post_load 7 | 8 | from aiocache import SimpleMemoryCache 9 | from aiocache.serializers import BaseSerializer 10 | 11 | 12 | class RandomModel: 13 | MY_CONSTANT = "CONSTANT" 14 | 15 | def __init__(self, int_type=None, str_type=None, dict_type=None, list_type=None): 16 | self.int_type = int_type or random.randint(1, 10) 17 | self.str_type = str_type or random.choice(string.ascii_lowercase) 18 | self.dict_type = dict_type or {} 19 | self.list_type = list_type or [] 20 | 21 | def __eq__(self, 
obj): 22 | return self.__dict__ == obj.__dict__ 23 | 24 | 25 | class RandomSchema(Schema): 26 | int_type = fields.Integer() 27 | str_type = fields.String() 28 | dict_type = fields.Dict() 29 | list_type = fields.List(fields.Integer()) 30 | 31 | @post_load 32 | def build_my_type(self, data, **kwargs): 33 | return RandomModel(**data) 34 | 35 | class Meta: 36 | strict = True 37 | 38 | 39 | class MarshmallowSerializer(BaseSerializer): 40 | def __init__(self, *args: Any, **kwargs: Any): 41 | super().__init__(*args, **kwargs) 42 | self.schema = RandomSchema() 43 | 44 | def dumps(self, value: Any) -> str: 45 | return self.schema.dumps(value) 46 | 47 | def loads(self, value: str) -> Any: 48 | return self.schema.loads(value) 49 | 50 | 51 | cache = SimpleMemoryCache(serializer=MarshmallowSerializer(), namespace="main") 52 | 53 | 54 | async def serializer(): 55 | model = RandomModel() 56 | await cache.set("key", model) 57 | 58 | result = await cache.get("key") 59 | 60 | assert result.int_type == model.int_type 61 | assert result.str_type == model.str_type 62 | assert result.dict_type == model.dict_type 63 | assert result.list_type == model.list_type 64 | 65 | 66 | async def test_serializer(): 67 | await serializer() 68 | await cache.delete("key") 69 | 70 | 71 | if __name__ == "__main__": 72 | asyncio.run(test_serializer()) 73 | -------------------------------------------------------------------------------- /docs/serializers.rst: -------------------------------------------------------------------------------- 1 | .. _serializers: 2 | 3 | Serializers 4 | =========== 5 | 6 | Serializers can be attached to backends in order to serialize/deserialize data sent to and retrieved from the backend. This allows you to apply transformations to the data in case you want it saved in a specific format in your cache backend. For example, imagine you have your ``Model`` and want to serialize it to something that Valkey can understand (Valkey can't store Python objects). This is the task of a serializer. 7 | 8 | To use a specific serializer:: 9 | 10 | >>> from aiocache import SimpleMemoryCache 11 | >>> from aiocache.serializers import PickleSerializer 12 | >>> cache = SimpleMemoryCache(serializer=PickleSerializer()) 13 | 14 | Currently the following are built in: 15 | 16 | 17 | .. _nullserializer: 18 | 19 | NullSerializer 20 | -------------- 21 | .. autoclass:: aiocache.serializers.NullSerializer 22 | :members: 23 | 24 | 25 | .. _stringserializer: 26 | 27 | StringSerializer 28 | ---------------- 29 | 30 | .. autoclass:: aiocache.serializers.StringSerializer 31 | :members: 32 | 33 | .. _pickleserializer: 34 | 35 | PickleSerializer 36 | ---------------- 37 | 38 | .. autoclass:: aiocache.serializers.PickleSerializer 39 | :members: 40 | 41 | .. _jsonserializer: 42 | 43 | JsonSerializer 44 | -------------- 45 | 46 | .. autoclass:: aiocache.serializers.JsonSerializer 47 | :members: 48 | 49 | .. _msgpackserializer: 50 | 51 | MsgPackSerializer 52 | ----------------- 53 | 54 | .. autoclass:: aiocache.serializers.MsgPackSerializer 55 | :members: 56 | 57 | If the current serializers do not cover your needs, you can always define your own custom serializer, as shown in ``examples/serializer_class.py``: 58 | 59 | .. literalinclude:: ../examples/serializer_class.py 60 | :language: python 61 | :linenos: 62 | 63 | You can also use marshmallow as your serializer (``examples/marshmallow_serializer_class.py``): 64 | 65 | ..
literalinclude:: ../examples/marshmallow_serializer_class.py 66 | :language: python 67 | :linenos: 68 | 69 | By default, cache backends assume they are working with ``str`` types. If your custom implementation transforms data to bytes, you will need to set the class attribute ``DEFAULT_ENCODING`` to ``None``. 70 | -------------------------------------------------------------------------------- /examples/serializer_class.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import zlib 3 | 4 | from glide import GlideClientConfiguration, NodeAddress 5 | 6 | from aiocache import ValkeyCache 7 | from aiocache.serializers import BaseSerializer 8 | 9 | addresses = [NodeAddress("localhost", 6379)] 10 | config = GlideClientConfiguration(addresses=addresses, database_id=0) 11 | 12 | 13 | class CompressionSerializer(BaseSerializer): 14 | 15 | # This is needed because zlib works with bytes. 16 | # This way the underlying backend knows how to 17 | # store/retrieve values. 18 | DEFAULT_ENCODING = None 19 | 20 | def dumps(self, value): 21 | print("I've received:\n{}".format(value)) 22 | compressed = zlib.compress(value.encode()) 23 | print("But I'm storing:\n{}".format(compressed)) 24 | return compressed 25 | 26 | def loads(self, value): 27 | print("I've retrieved:\n{}".format(value)) 28 | decompressed = zlib.decompress(value).decode() 29 | print("But I'm returning:\n{}".format(decompressed)) 30 | return decompressed 31 | 32 | 33 | async def serializer(cache): 34 | text = ( 35 | "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt" 36 | "ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation" 37 | "ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in" 38 | "reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur" 39 | "sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit" 40 | "anim id est laborum." 41 | ) 42 | await cache.set("key", text) 43 | print("-----------------------------------") 44 | real_value = await cache.get("key") 45 | compressed_value = await cache.raw("get", "main:key") 46 | assert len(compressed_value) < len(real_value.encode()) 47 | 48 | 49 | async def test_serializer(): 50 | async with ValkeyCache( 51 | config, namespace="main", serializer=CompressionSerializer() 52 | ) as cache: 53 | await serializer(cache) 54 | await cache.delete("key") 55 | await cache.close() 56 | 57 | 58 | if __name__ == "__main__": 59 | asyncio.run(test_serializer()) 60 | -------------------------------------------------------------------------------- /docs/caches.rst: -------------------------------------------------------------------------------- 1 | .. _caches: 2 | 3 | Caches 4 | ====== 5 | 6 | You can use different caches according to your needs. All the caches implement the same interface. 7 | 8 | Caches always work together with a serializer, which transforms data when it is stored in and retrieved from the backend. A cache may also contain plugins that enrich its behavior (adding metrics, logs, etc.). 9 | 10 | This is the flow of the ``set`` command: 11 | 12 | .. image:: images/set_operation_flow.png 13 | :align: center 14 | 15 | Let's go with a more specific case. Let's pick Redis as the cache with namespace "test" and PickleSerializer as the serializer: 16 | 17 | #. We receive ``set("key", "value")``. 18 | #. Hook ``pre_set`` of all attached plugins (none by default) is called.
19 | #. "key" will become "test:key" when calling ``build_key``. 20 | #. "value" will become an array of bytes when calling ``serializer.dumps`` because of ``PickleSerializer``. 21 | #. the byte array is stored together with the key using ``set`` cmd in Redis. 22 | #. Hook ``post_set`` of all attached plugins is called. 23 | 24 | By default, all commands are covered by a timeout that will trigger an ``asyncio.TimeoutError`` in case of timeout. Timeout can be set at instance level or when calling the command. 25 | 26 | The supported commands are: 27 | 28 | - add 29 | - get 30 | - set 31 | - multi_get 32 | - multi_set 33 | - delete 34 | - exists 35 | - increment 36 | - expire 37 | - clear 38 | - raw 39 | 40 | If you feel a command is missing here do not hesitate to `open an issue `_ 41 | 42 | 43 | .. _basecache: 44 | 45 | BaseCache 46 | --------- 47 | 48 | .. autoclass:: aiocache.base.BaseCache 49 | :members: 50 | 51 | 52 | .. _rediscache: 53 | 54 | RedisCache 55 | ---------- 56 | 57 | .. autoclass:: aiocache.backends.redis.RedisCache 58 | :members: 59 | 60 | 61 | .. _simplememorycache: 62 | 63 | SimpleMemoryCache 64 | ----------------- 65 | 66 | .. autoclass:: aiocache.SimpleMemoryCache 67 | :members: 68 | 69 | 70 | .. _memcachedcache: 71 | 72 | MemcachedCache 73 | -------------- 74 | 75 | .. autoclass:: aiocache.backends.memcached.MemcachedCache 76 | :members: 77 | 78 | 79 | .. _dynamodbcache: 80 | 81 | Third-party caches 82 | ================== 83 | 84 | Additional cache backends are available through other libraries. 85 | 86 | DynamoDBCache 87 | ------------- 88 | 89 | `aiocache-dynamodb `_ provides support for DynamoDB. 90 | 91 | .. autoclass:: aiocache_dynamodb.DynamoDBCache 92 | :members: 93 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import Optional, Union 3 | 4 | from aiocache.base import BaseCache 5 | 6 | 7 | class Keys(str, Enum): 8 | KEY: str = "key" 9 | KEY_1: str = "random" 10 | 11 | 12 | KEY_LOCK = Keys.KEY + "-lock" 13 | 14 | 15 | def ensure_key(key: Union[str, Enum]) -> str: 16 | if isinstance(key, Enum): 17 | return key.value 18 | else: 19 | return key 20 | 21 | 22 | class AbstractBaseCache(BaseCache[str]): 23 | """BaseCache that can be mocked for NotImplementedError tests""" 24 | def __init__(self, *args, **kwargs): 25 | super().__init__(*args, **kwargs) 26 | 27 | def build_key(self, key: str, namespace: Optional[str] = None) -> str: 28 | return super().build_key(key, namespace) 29 | 30 | async def _add(self, key, value, ttl, _conn=None): 31 | return await super()._add(key, value, ttl, _conn) 32 | 33 | async def _get(self, key, encoding, _conn=None): 34 | return await super()._get(key, encoding, _conn) 35 | 36 | async def _gets(self, key, encoding="utf-8", _conn=None): 37 | return await super()._gets(key, encoding, _conn) 38 | 39 | async def _multi_get(self, keys, encoding, _conn=None): 40 | return await super()._multi_get(keys, encoding, _conn) 41 | 42 | async def _set(self, key, value, ttl, _cas_token=None, _conn=None): 43 | return await super()._set(key, value, ttl, _cas_token, _conn) 44 | 45 | async def _multi_set(self, pairs, ttl, _conn=None): 46 | return await super()._multi_set(pairs, ttl, _conn) 47 | 48 | async def _delete(self, key, _conn=None): 49 | return await super()._delete(key, _conn) 50 | 51 | async def _exists(self, key, _conn=None): 52 | return await super()._exists(key, _conn) 
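    # Every stub in this class simply delegates to BaseCache, whose backend
    # methods raise NotImplementedError; instantiating AbstractBaseCache lets
    # the unit tests assert each command is abstract without a real backend.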
53 | 54 | async def _increment(self, key, delta, _conn=None): 55 | return await super()._increment(key, delta, _conn) 56 | 57 | async def _expire(self, key, ttl, _conn=None): 58 | return await super()._expire(key, ttl, _conn) 59 | 60 | async def _clear(self, namespace, _conn=None): 61 | return await super()._clear(namespace, _conn) 62 | 63 | async def _raw(self, command, *args, **kwargs): 64 | return await super()._raw(command, *args, **kwargs) 65 | 66 | async def _redlock_release(self, key, value): 67 | return await super()._redlock_release(key, value) 68 | 69 | 70 | class ConcreteBaseCache(AbstractBaseCache): 71 | """BaseCache that can be mocked for tests""" 72 | def __init__(self, *args, **kwargs): 73 | super().__init__(*args, **kwargs) 74 | 75 | def build_key(self, key: str, namespace: Optional[str] = None) -> str: 76 | return self._str_build_key(key, namespace) 77 | -------------------------------------------------------------------------------- /tests/performance/server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import sys 4 | import uuid 5 | from functools import partial 6 | from types import TracebackType 7 | from typing import AsyncIterator, Optional 8 | 9 | from aiohttp import web 10 | 11 | if sys.version_info >= (3, 11): 12 | from typing import Self 13 | else: 14 | from typing import Any as Self 15 | 16 | logging.getLogger("aiohttp.access").propagate = False 17 | 18 | 19 | class CacheManager: 20 | def __init__(self, backend: str): 21 | if backend == "valkey": 22 | from aiocache.backends.valkey import ValkeyCache 23 | from glide import GlideClientConfiguration, NodeAddress 24 | 25 | config = GlideClientConfiguration(addresses=[NodeAddress()], database_id=0) 26 | cache = ValkeyCache(config) 27 | elif backend == "memcached": 28 | from aiocache.backends.memcached import MemcachedCache 29 | cache = MemcachedCache() 30 | elif backend == "memory": 31 | from aiocache.backends.memory import SimpleMemoryCache 32 | cache = SimpleMemoryCache() 33 | else: 34 | raise ValueError("Invalid backend") 35 | self.cache = cache 36 | 37 | async def get(self, key): 38 | return await self.cache.get(key, timeout=0.1) 39 | 40 | async def set(self, key, value): 41 | return await self.cache.set(key, value, timeout=0.1) 42 | 43 | async def __aenter__(self) -> Self: 44 | await self.cache.__aenter__() 45 | return self 46 | 47 | async def __aexit__( 48 | self, 49 | exc_type: Optional[type[BaseException]], 50 | exc: Optional[BaseException], 51 | tb: Optional[TracebackType], 52 | ) -> None: 53 | await self.cache.__aexit__(exc_type, exc, tb) 54 | 55 | 56 | cache_key = web.AppKey("cache_key", CacheManager) 57 | 58 | 59 | async def handler_get(req: web.Request) -> web.Response: 60 | try: 61 | data = await req.app[cache_key].get("testkey") 62 | if data: 63 | return web.Response(text=data) 64 | except asyncio.TimeoutError: # pragma: no cover 65 | # This won't be reached if the concurrency tests achieve 100% success rates. 
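        # A read that exceeds the 0.1s budget set in CacheManager.get is
        # treated as a miss and reported as 404 so the benchmark keeps moving.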
66 | return web.Response(status=404) 67 | 68 | data = str(uuid.uuid4()) 69 | await req.app[cache_key].set("testkey", data) 70 | return web.Response(text=str(data)) 71 | 72 | 73 | async def ctx(app: web.Application, backend: str) -> AsyncIterator[None]: 74 | async with CacheManager(backend) as cm: 75 | app[cache_key] = cm 76 | yield 77 | 78 | 79 | def run_server(backend: str) -> None: 80 | app = web.Application() 81 | app.cleanup_ctx.append(partial(ctx, backend=backend)) 82 | app.router.add_route("GET", "/", handler_get) 83 | web.run_app(app) 84 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. aiocache documentation master file, created by 2 | sphinx-quickstart on Sat Oct 1 16:53:45 2016. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to aiocache's documentation! 7 | ==================================== 8 | 9 | 10 | Installing 11 | ---------- 12 | 13 | - ``pip install aiocache`` 14 | - ``pip install aiocache[redis]`` 15 | - ``pip install aiocache[memcached]`` 16 | - ``pip install aiocache[redis,memcached]`` 17 | 18 | 19 | Usage 20 | ----- 21 | 22 | Using a cache is as simple as 23 | 24 | .. code-block:: python 25 | 26 | >>> import asyncio 27 | >>> from aiocache import SimpleMemoryCache 28 | >>> cache = SimpleMemoryCache() 29 | >>> with asyncio.Runner() as runner: 30 | >>> runner.run(cache.set("key", "value")) 31 | True 32 | >>> runner.run(cache.get("key")) 33 | 'value' 34 | 35 | Here we are using the :ref:`simplememorycache` but you can use any other supported backends as listed in :ref:`caches`. 36 | All caches contain the same minimum interface which consists of the following functions: 37 | 38 | - ``add``: Only adds key/value if key does not exist. Otherwise raises ValueError. 39 | - ``get``: Retrieve value identified by key. 40 | - ``set``: Sets key/value. 41 | - ``multi_get``: Retrieves multiple key/values. 42 | - ``multi_set``: Sets multiple key/values. 43 | - ``exists``: Returns True if key exists False otherwise. 44 | - ``increment``: Increment the value stored in the given key. 45 | - ``delete``: Deletes key and returns number of deleted items. 46 | - ``clear``: Clears the items stored. 47 | - ``raw``: Executes the specified command using the underlying client. 48 | 49 | See the `examples folder `_ for different use cases: 50 | 51 | - `Sanic, Aiohttp and Tornado `_ 52 | - `Python object in Redis `_ 53 | - `Custom serializer for compressing data `_ 54 | - `TimingPlugin and HitMissRatioPlugin demos `_ 55 | - `Using marshmallow as a serializer `_ 56 | - `Using cached decorator `_. 57 | - `Using multi_cached decorator `_. 58 | 59 | 60 | Contents 61 | -------- 62 | 63 | .. 
toctree:: 64 | 65 | caches 66 | serializers 67 | plugins 68 | decorators 69 | locking 70 | testing 71 | v1_migration 72 | 73 | Indices and tables 74 | ================== 75 | 76 | * :ref:`genindex` 77 | * :ref:`modindex` 78 | * :ref:`search` 79 | -------------------------------------------------------------------------------- /tests/ut/test_plugins.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import create_autospec 2 | 3 | import pytest 4 | 5 | from aiocache.base import API, BaseCache 6 | from aiocache.plugins import BasePlugin, HitMissRatioPlugin, TimingPlugin 7 | from ..utils import Keys 8 | 9 | 10 | class TestBasePlugin: 11 | async def test_interface_methods(self): 12 | for method in API.CMDS: 13 | pre = await getattr(BasePlugin, "pre_{}".format(method.__name__))(None) 14 | assert pre is None 15 | post = await getattr(BasePlugin, "post_{}".format(method.__name__))(None) 16 | assert post is None 17 | 18 | async def test_do_nothing(self): 19 | assert await BasePlugin().do_nothing() is None 20 | 21 | 22 | class TestTimingPlugin: 23 | async def test_save_time(self, mock_cache): 24 | do_save_time = TimingPlugin().save_time("get") 25 | await do_save_time("self", mock_cache, took=1) 26 | await do_save_time("self", mock_cache, took=2) 27 | 28 | assert mock_cache.profiling["get_total"] == 2 29 | assert mock_cache.profiling["get_max"] == 2 30 | assert mock_cache.profiling["get_min"] == 1 31 | assert mock_cache.profiling["get_avg"] == 1.5 32 | 33 | async def test_save_time_post_set(self, mock_cache): 34 | await TimingPlugin().post_set(mock_cache, took=1) 35 | await TimingPlugin().post_set(mock_cache, took=2) 36 | 37 | assert mock_cache.profiling["set_total"] == 2 38 | assert mock_cache.profiling["set_max"] == 2 39 | assert mock_cache.profiling["set_min"] == 1 40 | assert mock_cache.profiling["set_avg"] == 1.5 41 | 42 | async def test_interface_methods(self): 43 | for method in API.CMDS: 44 | assert hasattr(TimingPlugin, "pre_{}".format(method.__name__)) 45 | assert hasattr(TimingPlugin, "post_{}".format(method.__name__)) 46 | 47 | 48 | class TestHitMissRatioPlugin: 49 | @pytest.fixture 50 | def plugin(self): 51 | return HitMissRatioPlugin() 52 | 53 | async def test_post_get(self, plugin): 54 | client = create_autospec(BaseCache, instance=True) 55 | await plugin.post_get(client, Keys.KEY) 56 | 57 | assert client.hit_miss_ratio["hits"] == 0 58 | assert client.hit_miss_ratio["total"] == 1 59 | assert client.hit_miss_ratio["hit_ratio"] == 0 60 | 61 | await plugin.post_get(client, Keys.KEY, ret="value") 62 | assert client.hit_miss_ratio["hits"] == 1 63 | assert client.hit_miss_ratio["total"] == 2 64 | assert client.hit_miss_ratio["hit_ratio"] == 0.5 65 | 66 | async def test_post_multi_get(self, plugin): 67 | client = create_autospec(BaseCache, instance=True) 68 | await plugin.post_multi_get(client, [Keys.KEY, Keys.KEY_1], ret=[None, None]) 69 | 70 | assert client.hit_miss_ratio["hits"] == 0 71 | assert client.hit_miss_ratio["total"] == 2 72 | assert client.hit_miss_ratio["hit_ratio"] == 0 73 | 74 | await plugin.post_multi_get(client, [Keys.KEY, Keys.KEY_1], ret=["value", "random"]) 75 | assert client.hit_miss_ratio["hits"] == 2 76 | assert client.hit_miss_ratio["total"] == 4 77 | assert client.hit_miss_ratio["hit_ratio"] == 0.5 78 | -------------------------------------------------------------------------------- /tests/acceptance/test_plugins.py: -------------------------------------------------------------------------------- 1 | from 
typing import OrderedDict 2 | 3 | import pytest 4 | 5 | from aiocache.plugins import HitMissRatioPlugin, TimingPlugin 6 | 7 | 8 | class TestHitMissRatioPlugin: 9 | @pytest.mark.parametrize( 10 | "data, ratio", 11 | [ 12 | ({"testa": 1, "testb": 2, "testc": 3}, 0.6), 13 | ({"testa": 1, "testz": 0}, 0.2), 14 | ({}, 0), 15 | ({"testa": 1, "testb": 2, "testc": 3, "testd": 4, "teste": 5}, 1), 16 | ], 17 | ) 18 | async def test_get_hit_miss_ratio(self, memory_cache, data, ratio): 19 | keys = ["a", "b", "c", "d", "e", "f"] 20 | memory_cache.plugins = [HitMissRatioPlugin()] 21 | memory_cache._cache = OrderedDict(data) 22 | 23 | for key in keys: 24 | await memory_cache.get(key) 25 | 26 | hits = [x for x in keys if "test" + x in data] 27 | assert memory_cache.hit_miss_ratio["hits"] == len(hits) 28 | assert ( 29 | memory_cache.hit_miss_ratio["hit_ratio"] 30 | == len(hits) / memory_cache.hit_miss_ratio["total"] 31 | ) 32 | 33 | @pytest.mark.parametrize( 34 | "data, ratio", 35 | [ 36 | ({"testa": 1, "testb": 2, "testc": 3}, 0.6), 37 | ({"testa": 1, "testz": 0}, 0.2), 38 | ({}, 0), 39 | ({"testa": 1, "testb": 2, "testc": 3, "testd": 4, "teste": 5}, 1), 40 | ], 41 | ) 42 | async def test_multi_get_hit_miss_ratio(self, memory_cache, data, ratio): 43 | keys = ["a", "b", "c", "d", "e", "f"] 44 | memory_cache.plugins = [HitMissRatioPlugin()] 45 | memory_cache._cache = OrderedDict(data) 46 | 47 | for key in keys: 48 | await memory_cache.multi_get([key]) 49 | 50 | hits = [x for x in keys if "test" + x in data] 51 | assert memory_cache.hit_miss_ratio["hits"] == len(hits) 52 | assert ( 53 | memory_cache.hit_miss_ratio["hit_ratio"] 54 | == len(hits) / memory_cache.hit_miss_ratio["total"] 55 | ) 56 | 57 | async def test_set_and_get_using_namespace(self, memory_cache): 58 | memory_cache.plugins = [HitMissRatioPlugin()] 59 | key = "A" 60 | namespace = "test" 61 | value = 1 62 | await memory_cache.set(key, value, namespace=namespace) 63 | result = await memory_cache.get(key, namespace=namespace) 64 | assert result == value 65 | 66 | 67 | class TestTimingPlugin: 68 | @pytest.mark.parametrize( 69 | "data, ratio", 70 | [ 71 | ({"testa": 1, "testb": 2, "testc": 3}, 0.6), 72 | ({"testa": 1, "testz": 0}, 0.2), 73 | ({}, 0), 74 | ({"testa": 1, "testb": 2, "testc": 3, "testd": 4, "teste": 5}, 1), 75 | ], 76 | ) 77 | async def test_get_avg_min_max(self, memory_cache, data, ratio): 78 | keys = ["a", "b", "c", "d", "e", "f"] 79 | memory_cache.plugins = [TimingPlugin()] 80 | memory_cache._cache = OrderedDict(data) 81 | 82 | for key in keys: 83 | await memory_cache.get(key) 84 | 85 | assert "get_max" in memory_cache.profiling 86 | assert "get_min" in memory_cache.profiling 87 | assert "get_total" in memory_cache.profiling 88 | assert "get_avg" in memory_cache.profiling 89 | -------------------------------------------------------------------------------- /aiocache/plugins.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module implements different plugins you can attach to your cache instance. They 3 | are coded in a collaborative way so you can use multiple inheritance.
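For example, a minimal custom plugin might look like the following sketch
(``LoggingPlugin`` is a made-up name; hooks follow the ``pre_<cmd>``/``post_<cmd>``
naming used below and receive the cache instance plus the command arguments):

    class LoggingPlugin(BasePlugin):
        async def pre_set(self, client, *args, **kwargs):
            print("about to run set on", client)

        async def post_set(self, client, *args, took=0, **kwargs):
            print("set finished in", took, "seconds")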
4 | """ 5 | 6 | from aiocache.base import API 7 | 8 | 9 | class BasePlugin: 10 | @classmethod 11 | def add_hook(cls, func, hooks): 12 | for hook in hooks: 13 | setattr(cls, hook, func) 14 | 15 | async def do_nothing(self, *args, **kwargs): 16 | pass 17 | 18 | 19 | BasePlugin.add_hook( 20 | BasePlugin.do_nothing, ["pre_{}".format(method.__name__) for method in API.CMDS] 21 | ) 22 | BasePlugin.add_hook( 23 | BasePlugin.do_nothing, ["post_{}".format(method.__name__) for method in API.CMDS] 24 | ) 25 | 26 | 27 | class TimingPlugin(BasePlugin): 28 | """ 29 | Calculates average, min and max times each command takes. The data is saved 30 | in the cache class as a dict attribute called ``profiling``. For example, to 31 | access the average time of the operation get, you can do ``cache.profiling['get_avg']`` 32 | """ 33 | 34 | @classmethod 35 | def save_time(cls, method): 36 | async def do_save_time(self, client, *args, took=0, **kwargs): 37 | if not hasattr(client, "profiling"): 38 | client.profiling = {} 39 | 40 | previous_total = client.profiling.get("{}_total".format(method), 0) 41 | previous_avg = client.profiling.get("{}_avg".format(method), 0) 42 | previous_max = client.profiling.get("{}_max".format(method), 0) 43 | previous_min = client.profiling.get("{}_min".format(method)) 44 | 45 | client.profiling["{}_total".format(method)] = previous_total + 1 46 | client.profiling["{}_avg".format(method)] = previous_avg + (took - previous_avg) / ( 47 | previous_total + 1 48 | ) 49 | client.profiling["{}_max".format(method)] = max(took, previous_max) 50 | client.profiling["{}_min".format(method)] = ( 51 | min(took, previous_min) if previous_min else took 52 | ) 53 | 54 | return do_save_time 55 | 56 | 57 | for method in API.CMDS: 58 | TimingPlugin.add_hook( 59 | TimingPlugin.save_time(method.__name__), ["post_{}".format(method.__name__)] 60 | ) 61 | 62 | 63 | class HitMissRatioPlugin(BasePlugin): 64 | """ 65 | Calculates the ratio of hits the cache has. The data is saved in the cache class as a dict 66 | attribute called ``hit_miss_ratio``. For example, to access the hit ratio of the cache, 67 | you can do ``cache.hit_miss_ratio['hit_ratio']``. It also provides the "total" and "hits" 68 | keys. 
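    A minimal usage sketch (assuming a SimpleMemoryCache, but any cache works
    the same way):

        cache = SimpleMemoryCache(plugins=[HitMissRatioPlugin()])
        await cache.get("key")       # miss on an empty cache
        print(cache.hit_miss_ratio)  # {'total': 1, 'hits': 0, 'hit_ratio': 0.0}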
69 | """ 70 | 71 | async def post_get(self, client, key, took=0, ret=None, **kwargs): 72 | if not hasattr(client, "hit_miss_ratio"): 73 | client.hit_miss_ratio = {} 74 | client.hit_miss_ratio["total"] = 0 75 | client.hit_miss_ratio["hits"] = 0 76 | 77 | client.hit_miss_ratio["total"] += 1 78 | if ret is not None: 79 | client.hit_miss_ratio["hits"] += 1 80 | 81 | client.hit_miss_ratio["hit_ratio"] = ( 82 | client.hit_miss_ratio["hits"] / client.hit_miss_ratio["total"] 83 | ) 84 | 85 | async def post_multi_get(self, client, keys, took=0, ret=None, **kwargs): 86 | if not hasattr(client, "hit_miss_ratio"): 87 | client.hit_miss_ratio = {} 88 | client.hit_miss_ratio["total"] = 0 89 | client.hit_miss_ratio["hits"] = 0 90 | 91 | client.hit_miss_ratio["total"] += len(keys) 92 | for result in ret: 93 | if result is not None: 94 | client.hit_miss_ratio["hits"] += 1 95 | 96 | client.hit_miss_ratio["hit_ratio"] = ( 97 | client.hit_miss_ratio["hits"] / client.hit_miss_ratio["total"] 98 | ) 99 | -------------------------------------------------------------------------------- /tests/ut/test_lock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from unittest.mock import Mock, patch 3 | 4 | import pytest 5 | 6 | from aiocache.lock import OptimisticLock, OptimisticLockError, RedLock 7 | from ..utils import KEY_LOCK, Keys 8 | 9 | 10 | class TestRedLock: 11 | @pytest.fixture 12 | def lock(self, mock_base_cache): 13 | RedLock._EVENTS = {} 14 | yield RedLock(mock_base_cache, Keys.KEY, 20) 15 | 16 | async def test_acquire(self, mock_base_cache, lock): 17 | await lock._acquire() 18 | mock_base_cache._add.assert_called_with(KEY_LOCK, lock._value, ttl=20) 19 | assert lock._EVENTS[KEY_LOCK].is_set() is False 20 | 21 | async def test_release(self, mock_base_cache, lock): 22 | mock_base_cache._redlock_release.return_value = True 23 | await lock._acquire() 24 | await lock._release() 25 | mock_base_cache._redlock_release.assert_called_with(KEY_LOCK, lock._value) 26 | assert KEY_LOCK not in lock._EVENTS 27 | 28 | async def test_release_no_acquire(self, mock_base_cache, lock): 29 | mock_base_cache._redlock_release.return_value = False 30 | assert KEY_LOCK not in lock._EVENTS 31 | await lock._release() 32 | assert KEY_LOCK not in lock._EVENTS 33 | 34 | async def test_context_manager(self, mock_base_cache, lock): 35 | async with lock: 36 | pass 37 | mock_base_cache._add.assert_called_with(KEY_LOCK, lock._value, ttl=20) 38 | mock_base_cache._redlock_release.assert_called_with(KEY_LOCK, lock._value) 39 | 40 | async def test_raises_exceptions(self, mock_base_cache, lock): 41 | mock_base_cache._redlock_release.return_value = True 42 | with pytest.raises(ValueError): 43 | async with lock: 44 | raise ValueError 45 | 46 | async def test_acquire_block_timeouts(self, mock_base_cache, lock): 47 | await lock._acquire() 48 | 49 | # Mock .wait() to avoid unawaited coroutine warning. 
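        # Forcing asyncio.wait_for to raise TimeoutError simulates waiting on a
        # lock that never gets released, while _add raising ValueError simulates
        # the key already being held by another RedLock instance.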
50 | with patch.object(RedLock._EVENTS[lock.key], "wait", Mock(spec_set=())): 51 | with patch("asyncio.wait_for", autospec=True, side_effect=asyncio.TimeoutError): 52 | mock_base_cache._add.side_effect = ValueError 53 | result = await lock._acquire() 54 | assert result is None 55 | 56 | async def test_wait_for_release_no_acquire(self, mock_base_cache, lock): 57 | mock_base_cache._add.side_effect = ValueError 58 | assert await lock._acquire() is None 59 | 60 | async def test_multiple_locks_lock(self, mock_base_cache, lock): 61 | lock_1 = RedLock(mock_base_cache, Keys.KEY, 20) 62 | lock_2 = RedLock(mock_base_cache, Keys.KEY, 20) 63 | mock_base_cache._add.side_effect = [True, ValueError(), ValueError()] 64 | await lock._acquire() 65 | event = lock._EVENTS[KEY_LOCK] 66 | 67 | assert KEY_LOCK in lock._EVENTS 68 | assert KEY_LOCK in lock_1._EVENTS 69 | assert KEY_LOCK in lock_2._EVENTS 70 | assert not event.is_set() 71 | 72 | await asyncio.gather(lock_1._acquire(), lock._release(), lock_2._acquire()) 73 | 74 | assert KEY_LOCK not in lock._EVENTS 75 | assert KEY_LOCK not in lock_1._EVENTS 76 | assert KEY_LOCK not in lock_2._EVENTS 77 | assert event.is_set() 78 | 79 | 80 | class TestOptimisticLock: 81 | @pytest.fixture 82 | def lock(self, mock_base_cache): 83 | yield OptimisticLock(mock_base_cache, Keys.KEY) 84 | 85 | def test_init(self, mock_base_cache, lock): 86 | assert lock.client == mock_base_cache 87 | assert lock._token is None 88 | assert lock.key == Keys.KEY 89 | assert lock.ns_key == mock_base_cache.build_key(Keys.KEY) 90 | 91 | async def test_aenter_returns_lock(self, lock): 92 | assert await lock.__aenter__() is lock 93 | 94 | async def test_aexit_not_crashing(self, lock): 95 | async with lock: 96 | pass 97 | 98 | async def test_acquire_calls_get(self, lock): 99 | await lock._acquire() 100 | lock.client._gets.assert_called_with(Keys.KEY) 101 | assert lock._token == lock.client._gets.return_value 102 | 103 | async def test_cas_calls_set_with_token(self, lock, mocker): 104 | m = mocker.spy(lock.client, "set") 105 | await lock._acquire() 106 | await lock.cas("value") 107 | m.assert_called_with(Keys.KEY, "value", _cas_token=lock._token) 108 | 109 | async def test_wrong_token_raises_error(self, mock_base_cache, lock): 110 | mock_base_cache._set.return_value = 0 111 | with pytest.raises(OptimisticLockError): 112 | await lock.cas("value") 113 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - '[0-9].[0-9]+' # matches to backport branches, e.g. 3.6 8 | tags: [ 'v*' ] 9 | pull_request: 10 | branches: 11 | - master 12 | - '[0-9].[0-9]+' 13 | 14 | jobs: 15 | lint: 16 | name: Linter 17 | runs-on: ubuntu-latest 18 | timeout-minutes: 10 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v5 22 | - name: Setup Python 23 | uses: actions/setup-python@v6 24 | with: 25 | python-version: 3.13 26 | cache: 'pip' 27 | cache-dependency-path: '**/requirements*.txt' 28 | - name: Pre-Commit hooks 29 | uses: pre-commit/action@v3.0.1 30 | - name: Install dependencies 31 | uses: py-actions/py-dependency-install@v4 32 | with: 33 | path: requirements-dev.txt 34 | - name: Install itself 35 | run: | 36 | pip install . 
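          # install the package itself so the lint step below can import aiocache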
37 | - name: Run linter 38 | run: | 39 | make lint 40 | - name: Prepare twine checker 41 | run: | 42 | pip install -U build twine wheel 43 | python -m build 44 | - name: Run twine checker 45 | run: | 46 | twine check dist/* 47 | 48 | test: 49 | name: Test 50 | strategy: 51 | matrix: 52 | os: [ubuntu] 53 | pyver: ['3.9', '3.10', '3.11', '3.12', '3.13'] 54 | valkey: ['latest'] 55 | ujson: [''] 56 | include: 57 | - os: ubuntu 58 | pyver: pypy-3.9 59 | valkey: 'latest' 60 | - os: ubuntu 61 | pyver: '3.9' 62 | valkey: '7.2.8' 63 | - os: ubuntu 64 | pyver: '3.9' 65 | valkey: 'latest' 66 | ujson: 'ujson' 67 | services: 68 | valkey: 69 | image: valkey/valkey:${{ matrix.valkey }} 70 | ports: 71 | - 6379:6379 72 | options: >- 73 | --health-cmd "valkey-cli ping" 74 | --health-interval 10s 75 | --health-timeout 5s 76 | --health-retries 5 77 | memcached: 78 | image: memcached 79 | ports: 80 | - 11211:11211 81 | runs-on: ${{ matrix.os }}-latest 82 | timeout-minutes: 15 83 | steps: 84 | - name: Checkout 85 | uses: actions/checkout@v5 86 | - name: Setup Python ${{ matrix.pyver }} 87 | uses: actions/setup-python@v6 88 | with: 89 | python-version: ${{ matrix.pyver }} 90 | allow-prereleases: true 91 | cache: 'pip' 92 | cache-dependency-path: '**/requirements*.txt' 93 | - name: Install ujson 94 | if: ${{ matrix.ujson == 'ujson' }} 95 | run: pip install ujson 96 | - name: Install dependencies 97 | uses: py-actions/py-dependency-install@v4 98 | with: 99 | path: requirements.txt 100 | - name: Run unittests 101 | env: 102 | COLOR: 'yes' 103 | run: pytest tests --cov-report xml --cov-report html 104 | - name: Run functional tests 105 | run: bash examples/run_all.sh 106 | - name: Uninstall optional backends 107 | run: pip uninstall -y aiomcache valkey-glide 108 | - name: Run unittests with minimal backend set 109 | env: 110 | COLOR: 'yes' 111 | run: | 112 | pytest --cov-report xml --cov-report html --cov-append tests/acceptance tests/ut -m "not memcached and not valkey" --ignore "tests/ut/backends/test_memcached.py" --ignore "tests/ut/backends/test_valkey.py" 113 | - name: Produce coverage report 114 | run: python -m coverage xml 115 | - name: Upload coverage 116 | uses: codecov/codecov-action@v5 117 | with: 118 | fail_ci_if_error: true 119 | token: ${{ secrets.CODECOV_TOKEN }} 120 | 121 | check: # This job does nothing and is only used for the branch protection 122 | if: always() 123 | 124 | needs: [lint, test] 125 | 126 | runs-on: ubuntu-latest 127 | 128 | steps: 129 | - name: Decide whether the needed jobs succeeded or failed 130 | uses: re-actors/alls-green@release/v1 131 | with: 132 | jobs: ${{ toJSON(needs) }} 133 | 134 | deploy: 135 | name: Deploy 136 | environment: release 137 | if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') 138 | needs: [check] 139 | runs-on: ubuntu-latest 140 | steps: 141 | - name: Checkout 142 | uses: actions/checkout@v5 143 | - name: Update pip, wheel, setuptools, build, twine 144 | run: | 145 | python -m pip install -U pip wheel setuptools build twine 146 | - name: Build dists 147 | run: | 148 | python -m build 149 | - name: Make Release 150 | uses: aio-libs/create-release@v1.6.6 151 | with: 152 | changes_file: CHANGES.rst 153 | name: aiocache 154 | version_file: aiocache/__init__.py 155 | github_token: ${{ secrets.GITHUB_TOKEN }} 156 | pypi_token: ${{ secrets.PYPI_API_TOKEN }} 157 | dist_dir: dist 158 | fix_issue_regex: "`#(\\d+) `" 159 | fix_issue_repl: "(#\\1)" 160 | -------------------------------------------------------------------------------- /README.rst: 
-------------------------------------------------------------------------------- 1 | aiocache 2 | ######## 3 | 4 | Asyncio cache supporting multiple backends (memory, redis, memcached, etc.). 5 | 6 | .. image:: https://codecov.io/gh/aio-libs/aiocache/branch/master/graph/badge.svg 7 | :target: https://codecov.io/gh/aio-libs/aiocache 8 | 9 | .. image:: https://badge.fury.io/py/aiocache.svg 10 | :target: https://pypi.python.org/pypi/aiocache 11 | 12 | .. image:: https://img.shields.io/pypi/pyversions/aiocache.svg 13 | :target: https://pypi.python.org/pypi/aiocache 14 | 15 | This library aims for simplicity over specialization. All caches contain the same minimum interface which consists of the following functions: 16 | 17 | - ``add``: Only adds key/value if key does not exist. 18 | - ``get``: Retrieve value identified by key. 19 | - ``set``: Sets key/value. 20 | - ``multi_get``: Retrieves multiple key/values. 21 | - ``multi_set``: Sets multiple key/values. 22 | - ``exists``: Returns True if key exists, False otherwise. 23 | - ``increment``: Increment the value stored in the given key. 24 | - ``delete``: Deletes key and returns number of deleted items. 25 | - ``clear``: Clears the items stored. 26 | - ``raw``: Executes the specified command using the underlying client. 27 | 28 | 29 | .. role:: python(code) 30 | :language: python 31 | 32 | .. contents:: 33 | 34 | .. section-numbering: 35 | 36 | 37 | Installing 38 | ========== 39 | 40 | - ``pip install aiocache`` 41 | - ``pip install aiocache[redis]`` 42 | - ``pip install aiocache[memcached]`` 43 | - ``pip install aiocache[redis,memcached]`` 44 | - ``pip install aiocache[msgpack]`` 45 | 46 | 47 | Usage 48 | ===== 49 | 50 | Using a cache is as simple as 51 | 52 | .. code-block:: python 53 | 54 | >>> import asyncio 55 | >>> from aiocache import SimpleMemoryCache 56 | >>> cache = SimpleMemoryCache() # Or RedisCache, MemcachedCache... 57 | >>> with asyncio.Runner() as runner: 58 | >>> runner.run(cache.set('key', 'value')) 59 | True 60 | >>> runner.run(cache.get('key')) 61 | 'value' 62 | 63 | Or as a decorator 64 | 65 | .. code-block:: python 66 | 67 | import asyncio 68 | import redis.asyncio as redis 69 | from collections import namedtuple 70 | 71 | from aiocache import RedisCache, cached 72 | from aiocache.serializers import PickleSerializer 73 | # With this we can store python objects in backends like Redis! 74 | 75 | Result = namedtuple('Result', "content, status") 76 | redis_client = redis.Redis(host="127.0.0.1", port=6379) 77 | redis_cache = RedisCache(redis_client, namespace="main") 78 | 79 | 80 | @cached(redis_cache, key="key", serializer=PickleSerializer(), port=6379, namespace="main") 81 | async def cached_call(): 82 | print("Sleeping for three seconds zzzz.....") 83 | await asyncio.sleep(3) 84 | return Result("content", 200) 85 | 86 | 87 | async def run(): 88 | async with redis_client, redis_cache: 89 | await cached_call() 90 | await cached_call() 91 | await cached_call() 92 | await redis_cache.delete("key") 93 | 94 | if __name__ == "__main__": 95 | asyncio.run(run()) 96 | 97 | 98 | How does it work 99 | ================ 100 | 101 | Aiocache provides 3 main entities: 102 | 103 | - **backends**: Allow you to specify which backend you want to use for your cache. See the docs for a full list of supported backends. 104 | - **serializers**: Serialize and deserialize the data between your code and the backends. This allows you to save any Python object into your cache. Currently supporting: StringSerializer, PickleSerializer, JsonSerializer, and MsgPackSerializer.
But you can also build custom ones. 105 | - **plugins**: Implement a hooks system that allows you to execute extra behavior before and after each command. 106 | 107 | If an implementation of a backend, serializer or plugin that you think could be interesting for the package is missing, do not hesitate to open a new issue. 108 | 109 | .. image:: docs/images/architecture.png 110 | :align: center 111 | 112 | Those 3 entities combine during some of the cache operations to apply the desired command (backend), data transformation (serializer) and pre/post hooks (plugins). To get a better picture of what happens, here is how the ``set`` function works in ``aiocache``: 113 | 114 | .. image:: docs/images/set_operation_flow.png 115 | :align: center 116 | 117 | 118 | Amazing examples 119 | ================ 120 | 121 | In the `examples folder `_ you can check different use cases: 122 | 123 | - `Sanic, Aiohttp and Tornado `_ 124 | - `Python object in Redis `_ 125 | - `Custom serializer for compressing data `_ 126 | - `TimingPlugin and HitMissRatioPlugin demos `_ 127 | - `Using marshmallow as a serializer `_ 128 | - `Using cached decorator `_. 129 | - `Using multi_cached decorator `_. 130 | 131 | 132 | 133 | Documentation 134 | ============= 135 | 136 | - `Usage `_ 137 | - `Caches `_ 138 | - `Serializers `_ 139 | - `Plugins `_ 140 | - `Configuration `_ 141 | - `Decorators `_ 142 | - `Testing `_ 143 | - `Examples `_ 144 | -------------------------------------------------------------------------------- /aiocache/backends/memory.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import Optional, OrderedDict 3 | 4 | from aiocache.base import BaseCache 5 | from aiocache.serializers import NullSerializer 6 | 7 | 8 | class SimpleMemoryCache(BaseCache[str]): 9 | """ 10 | Memory cache implementation with the following components as defaults: 11 | - serializer: :class:`aiocache.serializers.NullSerializer` 12 | - plugins: None 13 | - backend: dict 14 | 15 | Config options are: 16 | 17 | :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`. 18 | :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes. 19 | :param namespace: string to use as default prefix for the key used in all operations of 20 | the backend. Default is an empty string, "". 21 | :param timeout: int or float in seconds specifying maximum timeout for the operations to last. 22 | By default, it's 5.
23 | :param maxsize: int maximum number of keys to store (None for unlimited) 24 | """ 25 | 26 | NAME = "memory" 27 | 28 | # TODO(PY312): https://peps.python.org/pep-0692/ 29 | def __init__(self, **kwargs): 30 | # Extract maxsize before passing kwargs to base class 31 | self.maxsize = kwargs.pop('maxsize', None) 32 | if "serializer" not in kwargs: 33 | kwargs["serializer"] = NullSerializer() 34 | super().__init__(**kwargs) 35 | 36 | self._cache: OrderedDict[str, object] = OrderedDict() 37 | self._handlers: dict[str, asyncio.TimerHandle] = {} 38 | 39 | def _mark_accessed(self, key: str) -> None: 40 | """Move key to end to mark as recently used.""" 41 | if key in self._cache: 42 | self._cache.move_to_end(key) 43 | 44 | def _evict_if_needed(self) -> None: 45 | """Evict least recently used items if over maxsize.""" 46 | if self.maxsize is None: 47 | return 48 | 49 | while len(self._cache) > self.maxsize: 50 | key, _ = self._cache.popitem(last=False) # Remove LRU item 51 | if key in self._handlers: 52 | self._handlers[key].cancel() 53 | del self._handlers[key] 54 | 55 | async def _get(self, key, encoding="utf-8", _conn=None): 56 | self._mark_accessed(key) 57 | return self._cache.get(key) 58 | 59 | async def _gets(self, key, encoding="utf-8", _conn=None): 60 | return await self._get(key, encoding=encoding, _conn=_conn) 61 | 62 | async def _multi_get(self, keys, encoding="utf-8", _conn=None): 63 | return [await self._get(key, encoding=encoding, _conn=_conn) for key in keys] 64 | 65 | async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): 66 | if _cas_token is not None and self._cache.get(key) != _cas_token: 67 | return 0 68 | 69 | if key in self._handlers: 70 | self._handlers[key].cancel() 71 | 72 | self._cache[key] = value 73 | self._cache.move_to_end(key) 74 | 75 | if ttl: 76 | loop = asyncio.get_running_loop() 77 | self._handlers[key] = loop.call_later(ttl, self.__delete, key) 78 | 79 | # Evict the oldest items if over limit 80 | self._evict_if_needed() 81 | return True 82 | 83 | async def _multi_set(self, pairs, ttl=None, _conn=None): 84 | for key, value in pairs: 85 | await self._set(key, value, ttl=ttl) 86 | return True 87 | 88 | async def _add(self, key, value, ttl=None, _conn=None): 89 | if key in self._cache: 90 | raise ValueError(f"Key {key} already exists, use .set to update") 91 | return await self._set(key, value, ttl=ttl) 92 | 93 | async def _exists(self, key, _conn=None): 94 | return key in self._cache 95 | 96 | async def _increment(self, key, delta, _conn=None): 97 | if key not in self._cache: 98 | self._cache[key] = delta 99 | else: 100 | try: 101 | self._cache[key] = int(self._cache[key]) + delta 102 | except ValueError: 103 | raise TypeError("Value is not an integer") from None 104 | self._mark_accessed(key) 105 | return self._cache[key] 106 | 107 | async def _expire(self, key, ttl, _conn=None): 108 | if key not in self._cache: 109 | return False 110 | 111 | # Cancel existing timer 112 | if key in self._handlers: 113 | self._handlers[key].cancel() 114 | 115 | # Set new timer 116 | if ttl: 117 | loop = asyncio.get_running_loop() 118 | self._handlers[key] = loop.call_later(ttl, self.__delete, key) 119 | 120 | self._mark_accessed(key) 121 | return True 122 | 123 | async def _delete(self, key, _conn=None): 124 | return self.__delete(key) 125 | 126 | async def _clear(self, namespace=None, _conn=None): 127 | if namespace: 128 | for key in list(self._cache): 129 | if key.startswith(namespace): 130 | self.__delete(key) 131 | else: 132 | self._cache = OrderedDict() 133 
| self._handlers = {} 134 | return True 135 | 136 | async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs): 137 | return getattr(self._cache, command)(*args, **kwargs) 138 | 139 | async def _redlock_release(self, key, value): 140 | if self._cache.get(key) == value: 141 | return self.__delete(key) 142 | return 0 143 | 144 | def __delete(self, key): 145 | if self._cache.pop(key, None) is not None: 146 | handle = self._handlers.pop(key, None) 147 | if handle: 148 | handle.cancel() 149 | return 1 150 | 151 | return 0 152 | 153 | def build_key(self, key: str, namespace: Optional[str] = None) -> str: 154 | return self._str_build_key(key, namespace) 155 | 156 | @classmethod 157 | def parse_uri_path(cls, path): 158 | return {} 159 | -------------------------------------------------------------------------------- /tests/acceptance/test_serializers.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | import random 3 | from typing import Any 4 | 5 | import pytest 6 | from marshmallow import Schema, fields, post_load 7 | 8 | try: 9 | import ujson as json # noqa: I900 10 | except ImportError: 11 | import json # type: ignore[no-redef] 12 | 13 | from aiocache.serializers import ( 14 | BaseSerializer, 15 | JsonSerializer, 16 | NullSerializer, 17 | PickleSerializer, 18 | StringSerializer, 19 | ) 20 | from ..utils import Keys 21 | 22 | 23 | class MyType: 24 | MY_CONSTANT = "CONSTANT" 25 | 26 | def __init__(self, r=None): 27 | self.r = r or random.randint(1, 10) 28 | 29 | def __eq__(self, obj): 30 | return self.__dict__ == obj.__dict__ 31 | 32 | 33 | class MySchema(Schema): 34 | r = fields.Integer() 35 | 36 | @post_load 37 | def build_my_type(self, data, **kwargs): 38 | return MyType(**data) 39 | 40 | class Meta: 41 | strict = True 42 | 43 | 44 | class MyTypeSchema(BaseSerializer): 45 | def __init__(self, *args: Any, **kwargs: Any): 46 | super().__init__(*args, **kwargs) 47 | self.schema = MySchema() 48 | 49 | def dumps(self, value: Any) -> str: 50 | return self.schema.dumps(value) 51 | 52 | def loads(self, value: str) -> Any: 53 | return self.schema.loads(value) 54 | 55 | 56 | class TestNullSerializer: 57 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}, MyType()) 58 | 59 | @pytest.mark.parametrize("obj", TYPES) 60 | async def test_set_get_types(self, memory_cache, obj): 61 | memory_cache.serializer = NullSerializer() 62 | assert await memory_cache.set(Keys.KEY, obj) is True 63 | assert await memory_cache.get(Keys.KEY) is obj 64 | 65 | @pytest.mark.parametrize("obj", TYPES) 66 | async def test_add_get_types(self, memory_cache, obj): 67 | memory_cache.serializer = NullSerializer() 68 | assert await memory_cache.add(Keys.KEY, obj) is True 69 | assert await memory_cache.get(Keys.KEY) is obj 70 | 71 | @pytest.mark.parametrize("obj", TYPES) 72 | async def test_multi_set_multi_get_types(self, memory_cache, obj): 73 | memory_cache.serializer = NullSerializer() 74 | assert await memory_cache.multi_set([(Keys.KEY, obj)]) is True 75 | assert (await memory_cache.multi_get([Keys.KEY]))[0] is obj 76 | 77 | 78 | class TestStringSerializer: 79 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}, MyType()) 80 | 81 | @pytest.mark.parametrize("obj", TYPES) 82 | async def test_set_get_types(self, cache, obj): 83 | cache.serializer = StringSerializer() 84 | assert await cache.set(Keys.KEY, obj) is True 85 | assert await cache.get(Keys.KEY) == str(obj) 86 | 87 | @pytest.mark.parametrize("obj", TYPES) 88 | async def 
test_add_get_types(self, cache, obj): 89 | cache.serializer = StringSerializer() 90 | assert await cache.add(Keys.KEY, obj) is True 91 | assert await cache.get(Keys.KEY) == str(obj) 92 | 93 | @pytest.mark.parametrize("obj", TYPES) 94 | async def test_multi_set_multi_get_types(self, cache, obj): 95 | cache.serializer = StringSerializer() 96 | assert await cache.multi_set([(Keys.KEY, obj)]) is True 97 | assert await cache.multi_get([Keys.KEY]) == [str(obj)] 98 | 99 | 100 | class TestJsonSerializer: 101 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}) 102 | 103 | @pytest.mark.parametrize("obj", TYPES) 104 | async def test_set_get_types(self, cache, obj): 105 | cache.serializer = JsonSerializer() 106 | assert await cache.set(Keys.KEY, obj) is True 107 | assert await cache.get(Keys.KEY) == json.loads(json.dumps(obj)) 108 | 109 | @pytest.mark.parametrize("obj", TYPES) 110 | async def test_add_get_types(self, cache, obj): 111 | cache.serializer = JsonSerializer() 112 | assert await cache.add(Keys.KEY, obj) is True 113 | assert await cache.get(Keys.KEY) == json.loads(json.dumps(obj)) 114 | 115 | @pytest.mark.parametrize("obj", TYPES) 116 | async def test_multi_set_multi_get_types(self, cache, obj): 117 | cache.serializer = JsonSerializer() 118 | assert await cache.multi_set([(Keys.KEY, obj)]) is True 119 | assert await cache.multi_get([Keys.KEY]) == [json.loads(json.dumps(obj))] 120 | 121 | 122 | class TestPickleSerializer: 123 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}, MyType()) 124 | 125 | @pytest.mark.parametrize("obj", TYPES) 126 | async def test_set_get_types(self, cache, obj): 127 | cache.serializer = PickleSerializer() 128 | assert await cache.set(Keys.KEY, obj) is True 129 | assert await cache.get(Keys.KEY) == pickle.loads(pickle.dumps(obj)) 130 | 131 | @pytest.mark.parametrize("obj", TYPES) 132 | async def test_add_get_types(self, cache, obj): 133 | cache.serializer = PickleSerializer() 134 | assert await cache.add(Keys.KEY, obj) is True 135 | assert await cache.get(Keys.KEY) == pickle.loads(pickle.dumps(obj)) 136 | 137 | @pytest.mark.parametrize("obj", TYPES) 138 | async def test_multi_set_multi_get_types(self, cache, obj): 139 | cache.serializer = PickleSerializer() 140 | assert await cache.multi_set([(Keys.KEY, obj)]) is True 141 | assert await cache.multi_get([Keys.KEY]) == [pickle.loads(pickle.dumps(obj))] 142 | 143 | 144 | class TestAltSerializers: 145 | async def test_get_set_alt_serializer_functions(self, cache): 146 | cache.serializer = StringSerializer() 147 | await cache.set(Keys.KEY, "value", dumps_fn=lambda _: "v4lu3") 148 | assert await cache.get(Keys.KEY) == "v4lu3" 149 | assert await cache.get(Keys.KEY, loads_fn=lambda _: "value") == "value" 150 | 151 | async def test_get_set_alt_serializer_class(self, cache): 152 | my_serializer = MyTypeSchema() 153 | my_obj = MyType() 154 | cache.serializer = my_serializer 155 | await cache.set(Keys.KEY, my_obj) 156 | assert await cache.get(Keys.KEY) == my_serializer.loads(my_serializer.dumps(my_obj)) 157 | -------------------------------------------------------------------------------- /tests/performance/test_footprint.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import time 3 | from typing import AsyncIterator 4 | 5 | import aiomcache 6 | import pytest 7 | from glide import GlideClient, GlideClientConfiguration, NodeAddress 8 | 9 | 10 | @pytest.fixture 11 | async def valkey_client() -> AsyncIterator[GlideClient]: 12 | addresses = 
[NodeAddress("localhost", 6379)] 13 | conf = GlideClientConfiguration(addresses=addresses) 14 | client = await GlideClient.create(conf) 15 | 16 | yield client 17 | 18 | await client.close() 19 | 20 | 21 | @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Too slow") 22 | class TestValkey: 23 | async def test_valkey_getsetdel(self, valkey_client, valkey_cache): 24 | N = 10000 25 | valkey_total_time = 0 26 | for _n in range(N): 27 | start = time.time() 28 | await valkey_client.set("hi", "value") 29 | await valkey_client.get("hi") 30 | await valkey_client.delete(["hi"]) 31 | valkey_total_time += time.time() - start 32 | 33 | aiocache_total_time = 0 34 | for _n in range(N): 35 | start = time.time() 36 | await valkey_cache.set("hi", "value", timeout=0) 37 | await valkey_cache.get("hi", timeout=0) 38 | await valkey_cache.delete("hi", timeout=0) 39 | aiocache_total_time += time.time() - start 40 | 41 | print( 42 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 43 | aiocache_total_time, valkey_total_time, aiocache_total_time / valkey_total_time 44 | ) 45 | ) 46 | print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) 47 | print("valkey avg call: {:0.5f}s".format(valkey_total_time / N)) 48 | assert aiocache_total_time / valkey_total_time < 1.35 49 | 50 | async def test_valkey_multigetsetdel(self, valkey_client, valkey_cache): 51 | N = 5000 52 | valkey_total_time = 0 53 | values = ["a", "b", "c", "d", "e", "f"] 54 | for _n in range(N): 55 | start = time.time() 56 | await valkey_client.mset({x: x for x in values}) 57 | await valkey_client.mget(values) 58 | for k in values: 59 | await valkey_client.delete([k]) 60 | valkey_total_time += time.time() - start 61 | 62 | aiocache_total_time = 0 63 | for _n in range(N): 64 | start = time.time() 65 | await valkey_cache.multi_set([(x, x) for x in values], timeout=0) 66 | await valkey_cache.multi_get(values, timeout=0) 67 | for k in values: 68 | await valkey_cache.delete(k, timeout=0) 69 | aiocache_total_time += time.time() - start 70 | 71 | print( 72 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 73 | aiocache_total_time, valkey_total_time, aiocache_total_time / valkey_total_time 74 | ) 75 | ) 76 | print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) 77 | print("valkey_client avg call: {:0.5f}s".format(valkey_total_time / N)) 78 | assert aiocache_total_time / valkey_total_time < 1.35 79 | 80 | 81 | @pytest.fixture 82 | async def aiomcache_pool(): 83 | client = aiomcache.Client("127.0.0.1", 11211, pool_size=1) 84 | yield client 85 | await client.close() 86 | 87 | 88 | @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Too slow") 89 | class TestMemcached: 90 | async def test_memcached_getsetdel(self, aiomcache_pool, memcached_cache): 91 | N = 10000 92 | aiomcache_total_time = 0 93 | for _n in range(N): 94 | start = time.time() 95 | await aiomcache_pool.set(b"hi", b"value") 96 | await aiomcache_pool.get(b"hi") 97 | await aiomcache_pool.delete(b"hi") 98 | aiomcache_total_time += time.time() - start 99 | 100 | aiocache_total_time = 0 101 | for _n in range(N): 102 | start = time.time() 103 | await memcached_cache.set("hi", "value", timeout=0) 104 | await memcached_cache.get("hi", timeout=0) 105 | await memcached_cache.delete("hi", timeout=0) 106 | aiocache_total_time += time.time() - start 107 | 108 | print( 109 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 110 | aiocache_total_time, 111 | aiomcache_total_time, 112 | aiocache_total_time / aiomcache_total_time, 113 | ) 114 | ) 115 | print("aiocache avg call: 
{:0.5f}s".format(aiocache_total_time / N)) 116 | print("aiomcache avg call: {:0.5f}s".format(aiomcache_total_time / N)) 117 | assert aiocache_total_time / aiomcache_total_time < 1.40 118 | 119 | async def test_memcached_multigetsetdel(self, aiomcache_pool, memcached_cache): 120 | N = 2000 121 | aiomcache_total_time = 0 122 | values = [b"a", b"b", b"c", b"d", b"e", b"f"] 123 | for _n in range(N): 124 | start = time.time() 125 | for k in values: 126 | await aiomcache_pool.set(k, k) 127 | await aiomcache_pool.multi_get(*values) 128 | for k in values: 129 | await aiomcache_pool.delete(k) 130 | aiomcache_total_time += time.time() - start 131 | 132 | aiocache_total_time = 0 133 | values = ["a", "b", "c", "d", "e", "f"] 134 | for _n in range(N): 135 | start = time.time() 136 | await memcached_cache.multi_set([(x, x) for x in values], timeout=0) 137 | await memcached_cache.multi_get(values, timeout=0) 138 | for k in values: 139 | await memcached_cache.delete(k, timeout=0) 140 | aiocache_total_time += time.time() - start 141 | 142 | print( 143 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 144 | aiocache_total_time, 145 | aiomcache_total_time, 146 | aiocache_total_time / aiomcache_total_time, 147 | ) 148 | ) 149 | print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) 150 | print("aiomcache avg call: {:0.5f}s".format(aiomcache_total_time / N)) 151 | assert aiocache_total_time / aiomcache_total_time < 1.40 152 | -------------------------------------------------------------------------------- /tests/ut/test_serializers.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from collections import namedtuple 3 | from unittest import mock 4 | 5 | import pytest 6 | 7 | from aiocache.serializers import ( 8 | BaseSerializer, 9 | JsonSerializer, 10 | MsgPackSerializer, 11 | NullSerializer, 12 | PickleSerializer, 13 | StringSerializer, 14 | ) 15 | 16 | 17 | Dummy = namedtuple("Dummy", "a, b") 18 | 19 | TYPES = [1, 2.0, "hi", True, ["1", 1], {"key": "value"}, Dummy(1, 2)] 20 | JSON_TYPES = [1, 2.0, "hi", True, ["1", 1], {"key": "value"}] 21 | 22 | 23 | class TestNullSerializer: 24 | def test_init(self): 25 | serializer = NullSerializer() 26 | assert isinstance(serializer, BaseSerializer) 27 | assert serializer.DEFAULT_ENCODING == "utf-8" 28 | assert serializer.encoding == "utf-8" 29 | 30 | def test_init_encoding(self): 31 | serializer = NullSerializer(encoding="whatever") 32 | assert serializer.DEFAULT_ENCODING == "utf-8" 33 | assert serializer.encoding == "whatever" 34 | 35 | @pytest.mark.parametrize("obj", TYPES) 36 | def test_set_types(self, obj): 37 | assert NullSerializer().dumps(obj) is obj 38 | 39 | def test_loads(self): 40 | assert NullSerializer().loads("hi") == "hi" 41 | 42 | 43 | class TestStringSerializer: 44 | def test_init(self): 45 | serializer = StringSerializer() 46 | assert isinstance(serializer, BaseSerializer) 47 | assert serializer.DEFAULT_ENCODING == "utf-8" 48 | assert serializer.encoding == "utf-8" 49 | 50 | @pytest.mark.parametrize("obj", TYPES) 51 | def test_set_types(self, obj): 52 | assert StringSerializer().dumps(obj) == str(obj) 53 | 54 | def test_loads(self): 55 | assert StringSerializer().loads("hi") == "hi" 56 | 57 | 58 | class TestPickleSerializer: 59 | @pytest.fixture 60 | def serializer(self): 61 | yield PickleSerializer(protocol=4) 62 | 63 | def test_init(self, serializer): 64 | assert isinstance(serializer, PickleSerializer) 65 | assert serializer.DEFAULT_ENCODING is None 66 | assert serializer.encoding is None 67 
| assert serializer.protocol == 4 68 | 69 | def test_init_sets_default_protocol(self): 70 | serializer = PickleSerializer() 71 | assert serializer.protocol == pickle.DEFAULT_PROTOCOL 72 | 73 | @pytest.mark.parametrize("obj", TYPES) 74 | def test_set_types(self, obj, serializer): 75 | assert serializer.loads(serializer.dumps(obj)) == obj 76 | 77 | def test_dumps(self, serializer): 78 | expected = b"\x80\x04\x95\x06\x00\x00\x00\x00\x00\x00\x00\x8c\x02hi\x94." 79 | assert serializer.dumps("hi") == expected 80 | 81 | def test_dumps_with_none(self, serializer): 82 | assert isinstance(serializer.dumps(None), bytes) 83 | 84 | def test_loads(self, serializer): 85 | assert serializer.loads(b"\x80\x03X\x02\x00\x00\x00hiq\x00.") == "hi" 86 | 87 | def test_loads_with_none(self, serializer): 88 | assert serializer.loads(None) is None 89 | 90 | def test_dumps_and_loads(self, serializer): 91 | obj = Dummy(1, 2) 92 | assert serializer.loads(serializer.dumps(obj)) == obj 93 | 94 | 95 | class TestJsonSerializer: 96 | def test_init(self): 97 | serializer = JsonSerializer() 98 | assert isinstance(serializer, BaseSerializer) 99 | assert serializer.DEFAULT_ENCODING == "utf-8" 100 | assert serializer.encoding == "utf-8" 101 | 102 | @pytest.mark.parametrize("obj", JSON_TYPES) 103 | def test_set_types(self, obj): 104 | serializer = JsonSerializer() 105 | assert serializer.loads(serializer.dumps(obj)) == obj 106 | 107 | def test_dumps(self): 108 | assert ( 109 | JsonSerializer().dumps({"hi": 1}) == '{"hi": 1}' 110 | or JsonSerializer().dumps({"hi": 1}) == '{"hi":1}' # json 111 | ) # ujson 112 | 113 | def test_dumps_with_none(self): 114 | assert JsonSerializer().dumps(None) == "null" 115 | 116 | def test_loads_with_null(self): 117 | assert JsonSerializer().loads("null") is None 118 | 119 | def test_loads_with_none(self): 120 | assert JsonSerializer().loads(None) is None 121 | 122 | def test_dumps_and_loads(self): 123 | obj = {"hi": 1} 124 | serializer = JsonSerializer() 125 | assert serializer.loads(serializer.dumps(obj)) == obj 126 | 127 | 128 | class TestMsgPackSerializer: 129 | def test_init(self): 130 | serializer = MsgPackSerializer() 131 | assert isinstance(serializer, BaseSerializer) 132 | assert serializer.DEFAULT_ENCODING == "utf-8" 133 | assert serializer.encoding == "utf-8" 134 | 135 | def test_init_fails_if_msgpack_not_installed(self): 136 | with mock.patch("aiocache.serializers.serializers.msgpack", None): 137 | with pytest.raises(RuntimeError): 138 | MsgPackSerializer() 139 | assert JsonSerializer(), "Other serializers should still initialize" 140 | 141 | def test_init_use_list(self): 142 | serializer = MsgPackSerializer(use_list=True) 143 | assert serializer.use_list is True 144 | 145 | @pytest.mark.parametrize("obj", JSON_TYPES) 146 | def test_set_types(self, obj): 147 | serializer = MsgPackSerializer() 148 | assert serializer.loads(serializer.dumps(obj)) == obj 149 | 150 | def test_dumps(self): 151 | assert MsgPackSerializer().dumps("hi") == b"\xa2hi" 152 | 153 | def test_dumps_with_none(self): 154 | assert isinstance(MsgPackSerializer().dumps(None), bytes) 155 | 156 | def test_loads(self): 157 | assert MsgPackSerializer().loads(b"\xa2hi") == "hi" 158 | 159 | def test_loads_no_encoding(self): 160 | assert MsgPackSerializer(encoding=None).loads(b"\xa2hi") == b"hi" 161 | 162 | def test_loads_with_none(self): 163 | assert MsgPackSerializer().loads(None) is None 164 | 165 | def test_dumps_and_loads_tuple(self): 166 | serializer = MsgPackSerializer() 167 | assert 
167 |         assert serializer.loads(serializer.dumps(Dummy(1, 2))) == [1, 2]
168 |
169 |     def test_dumps_and_loads_dict(self):
170 |         serializer = MsgPackSerializer()
171 |         d = {"a": [1, 2, ("1", 2)], "b": {"b": 1, "c": [1, 2]}}
172 |         assert serializer.loads(serializer.dumps(d)) == {
173 |             "a": [1, 2, ["1", 2]],
174 |             "b": {"b": 1, "c": [1, 2]},
175 |         }
176 |
--------------------------------------------------------------------------------
/aiocache/backends/memcached.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import Optional
3 |
4 | import aiomcache
5 |
6 | from aiocache.base import BaseCache
7 | from aiocache.serializers import JsonSerializer
8 |
9 |
10 | class MemcachedCache(BaseCache[bytes]):
11 |     """
12 |     Memcached cache implementation with the following components as defaults:
13 |         - serializer: :class:`aiocache.serializers.JsonSerializer`
14 |         - plugins: []
15 |
16 |     Config options are:
17 |
18 |     :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
19 |     :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
20 |     :param namespace: string to use as default prefix for the key used in all operations of
21 |         the backend. Default is an empty string, "".
22 |     :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
23 |         By default it's 5.
24 |     :param host: str with the host to connect to. Default is 127.0.0.1.
25 |     :param port: int with the port to connect to. Default is 11211.
26 |     :param pool_size: int size for memcached connections pool. Default is 2.
27 |     """
28 |
29 |     NAME = "memcached"
30 |
31 |     def __init__(self, host="127.0.0.1", port=11211, pool_size=2, **kwargs):
32 |         if "serializer" not in kwargs:
33 |             kwargs["serializer"] = JsonSerializer()
34 |
35 |         super().__init__(**kwargs)
36 |         self.host = host
37 |         self.port = port
38 |         self.pool_size = int(pool_size)
39 |         self.client = aiomcache.Client(self.host, self.port, pool_size=self.pool_size)
40 |
41 |     async def _get(self, key, encoding="utf-8", _conn=None):
42 |         value = await self.client.get(key)
43 |         if encoding is None or value is None:
44 |             return value
45 |         return value.decode(encoding)
46 |
47 |     async def _gets(self, key, encoding="utf-8", _conn=None):
48 |         key = key.encode() if isinstance(key, str) else key
49 |         _, token = await self.client.gets(key)
50 |         return token
51 |
52 |     async def _multi_get(self, keys, encoding="utf-8", _conn=None):
53 |         values = []
54 |         for value in await self.client.multi_get(*keys):
55 |             if encoding is None or value is None:
56 |                 values.append(value)
57 |             else:
58 |                 values.append(value.decode(encoding))
59 |         return values
60 |
61 |     async def _set(self, key, value, ttl=0, _cas_token=None, _conn=None):
62 |         value = value.encode() if isinstance(value, str) else value
63 |         if _cas_token is not None:
64 |             return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn)
65 |         try:
66 |             return await self.client.set(key, value, exptime=ttl or 0)
67 |         except aiomcache.exceptions.ValidationException as e:
68 |             raise TypeError("aiomcache error: {}".format(str(e)))
69 |
70 |     async def _cas(self, key, value, token, ttl=None, _conn=None):
71 |         return await self.client.cas(key, value, token, exptime=ttl or 0)
72 |
73 |     async def _multi_set(self, pairs, ttl=0, _conn=None):
74 |         tasks = []
75 |         for key, value in pairs:
76 |             value = str.encode(value) if isinstance(value, str) else value
77 |             tasks.append(self.client.set(key, value, exptime=ttl or 0))
78 |
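        # The per-key sets run concurrently via asyncio.gather() below;
        # aiomcache validation errors are normalized to TypeError.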
79 |         try:
80 |             await asyncio.gather(*tasks)
81 |         except aiomcache.exceptions.ValidationException as e:
82 |             raise TypeError("aiomcache error: {}".format(str(e)))
83 |
84 |         return True
85 |
86 |     async def _add(self, key, value, ttl=0, _conn=None):
87 |         value = str.encode(value) if isinstance(value, str) else value
88 |         try:
89 |             ret = await self.client.add(key, value, exptime=ttl or 0)
90 |         except aiomcache.exceptions.ValidationException as e:
91 |             raise TypeError("aiomcache error: {}".format(str(e)))
92 |         if not ret:
93 |             raise ValueError("Key {} already exists, use .set to update the value".format(key))
94 |
95 |         return True
96 |
97 |     async def _exists(self, key, _conn=None):
98 |         return await self.client.append(key, b"")
99 |
100 |     async def _increment(self, key, delta, _conn=None):
101 |         incremented = None
102 |         try:
103 |             if delta > 0:
104 |                 incremented = await self.client.incr(key, delta)
105 |             else:
106 |                 incremented = await self.client.decr(key, abs(delta))
107 |         except aiomcache.exceptions.ClientException as e:
108 |             if "NOT_FOUND" in str(e):
109 |                 await self._set(key, str(delta).encode())
110 |             else:
111 |                 raise TypeError("aiomcache error: {}".format(str(e)))
112 |
113 |         return incremented or delta
114 |
115 |     async def _expire(self, key, ttl, _conn=None):
116 |         return await self.client.touch(key, ttl)
117 |
118 |     async def _delete(self, key, _conn=None):
119 |         return 1 if await self.client.delete(key) else 0
120 |
121 |     async def _clear(self, namespace=None, _conn=None):
122 |         if namespace:
123 |             raise ValueError("MemcachedBackend doesn't support flushing by namespace")
124 |         else:
125 |             await self.client.flush_all()
126 |         return True
127 |
128 |     async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
129 |         value = await getattr(self.client, command)(*args, **kwargs)
130 |         if command in {"get", "multi_get"}:
131 |             if encoding is not None and value is not None:
132 |                 return value.decode(encoding)
133 |         return value
134 |
135 |     async def _redlock_release(self, key, _):
136 |         # Not ideal: we should check that the value matches first, but doing
137 |         # so here would introduce race conditions
138 |         return await self._delete(key)
139 |
140 |     async def _close(self, *args, _conn=None, **kwargs):
141 |         await self.client.close()
142 |
143 |     def build_key(self, key: str, namespace: Optional[str] = None) -> bytes:
144 |         ns_key = self._str_build_key(key, namespace).replace(" ", "_")
145 |         return str.encode(ns_key)
146 |
147 |     @classmethod
148 |     def parse_uri_path(cls, path):
149 |         return {}
150 |
151 |     def __repr__(self):  # pragma: no cover
152 |         return "MemcachedCache ({}:{})".format(self.host, self.port)
153 |
--------------------------------------------------------------------------------
/aiocache/serializers/serializers.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import pickle  # noqa: S403
3 | from abc import ABC, abstractmethod
4 | from typing import Any, Optional
5 |
6 | logger = logging.getLogger(__name__)
7 |
8 | try:
9 |     import ujson as json  # noqa: I900
10 | except ImportError:
11 |     logger.debug("ujson module not found, using json")
12 |     import json  # type: ignore[no-redef]
13 |
14 | try:
15 |     import msgpack
16 | except ImportError:
17 |     msgpack = None
18 |     logger.debug("msgpack not installed, MsgPackSerializer unavailable")
19 |
20 |
21 | _NOT_SET = object()
22 |
23 |
24 | class BaseSerializer(ABC):
25 |
26 |     DEFAULT_ENCODING: Optional[str] = "utf-8"
27 |
28 |     def __init__(self, *args, encoding=_NOT_SET, **kwargs):
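        # `_NOT_SET` is a sentinel rather than None: `encoding=None` is a
        # meaningful value here (it disables text decoding entirely).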
29 |         self.encoding = self.DEFAULT_ENCODING if encoding is _NOT_SET else encoding
30 |         super().__init__(*args, **kwargs)
31 |
32 |     @abstractmethod
33 |     def dumps(self, value: Any, /) -> Any:
34 |         """Serialise the value to be stored in the backend."""
35 |
36 |     @abstractmethod
37 |     def loads(self, value: Any, /) -> Any:
38 |         """Decode the value retrieved from the backend."""
39 |
40 |
41 | class NullSerializer(BaseSerializer):
42 |     """
43 |     This serializer does nothing. It stores data as-is, so it is only
44 |     recommended for :class:`aiocache.SimpleMemoryCache`; with other
45 |     backends it will produce incompatible data unless you work only
46 |     with str types.
47 |
48 |     DISCLAIMER: Be careful with mutable types and memory storage. The following
49 |     behavior is considered normal (same as ``functools.lru_cache``)::
50 |
51 |         cache = Cache()
52 |         my_list = [1]
53 |         await cache.set("key", my_list)
54 |         my_list.append(2)
55 |         await cache.get("key")  # Will return [1, 2]
56 |     """
57 |
58 |     def dumps(self, value):
59 |         """
60 |         Returns the same value
61 |         """
62 |         return value
63 |
64 |     def loads(self, value):
65 |         """
66 |         Returns the same value
67 |         """
68 |         return value
69 |
70 |
71 | class StringSerializer(BaseSerializer):
72 |     """
73 |     Converts all input values to str. All return values are also str. Be
74 |     careful because this means that if you store an ``int(1)``, you will get
75 |     back '1'.
76 |
77 |     The transformation is done by just casting to str in the ``dumps`` method.
78 |
79 |     If you want to keep python types, use ``PickleSerializer``. ``JsonSerializer``
80 |     may also be useful to keep the type of simple python types.
81 |     """
82 |
83 |     def dumps(self, value):
84 |         """
85 |         Serialize the received value by casting it to str.
86 |
87 |         :param value: obj that supports casting to str
88 |         :returns: str
89 |         """
90 |         return str(value)
91 |
92 |     def loads(self, value):
93 |         """
94 |         Returns the value back without transformations
95 |         """
96 |         return value
97 |
98 |
99 | class PickleSerializer(BaseSerializer):
100 |     """
101 |     Transform data to bytes using pickle.dumps and pickle.loads to retrieve it back.
102 |     """
103 |
104 |     DEFAULT_ENCODING = None
105 |
106 |     def __init__(self, *args, protocol=pickle.DEFAULT_PROTOCOL, **kwargs):
107 |         super().__init__(*args, **kwargs)
108 |         self.protocol = protocol
109 |
110 |     def dumps(self, value):
111 |         """
112 |         Serialize the received value using ``pickle.dumps``.
113 |
114 |         :param value: obj
115 |         :returns: bytes
116 |         """
117 |         return pickle.dumps(value, protocol=self.protocol)
118 |
119 |     def loads(self, value):
120 |         """
121 |         Deserialize value using ``pickle.loads``.
122 |
123 |         :param value: bytes
124 |         :returns: obj
125 |         """
126 |         if value is None:
127 |             return None
128 |         return pickle.loads(value)  # noqa: S301
129 |
130 |
131 | class JsonSerializer(BaseSerializer):
132 |     """
133 |     Transform data to a json string with json.dumps and json.loads to retrieve it back. Check
134 |     https://docs.python.org/3/library/json.html#py-to-json-table for how types are converted.
135 |
136 |     ujson will be used by default if available. Be careful with differences between the built-in
137 |     json module and ujson:
138 |         - ujson dumps supports bytes while json doesn't
139 |         - ujson and json outputs may differ sometimes
140 |     """
141 |
142 |     def dumps(self, value):
143 |         """
144 |         Serialize the received value using ``json.dumps``.
145 |
146 |         :param value: dict
147 |         :returns: str
148 |         """
149 |         return json.dumps(value)
150 |
151 |     def loads(self, value):
152 |         """
153 |         Deserialize value using ``json.loads``.
154 |
155 |         :param value: str
156 |         :returns: output of ``json.loads``.
157 |         """
158 |         if value is None:
159 |             return None
160 |         return json.loads(value)
161 |
162 |
163 | class MsgPackSerializer(BaseSerializer):
164 |     """
165 |     Transform data to bytes using msgpack.dumps and msgpack.loads to retrieve it back. You need
166 |     to have ``msgpack`` installed in order to be able to use this serializer.
167 |
168 |     :param encoding: str. Can be used to change the encoding param of the ``msgpack.loads`` method.
169 |         Default is utf-8.
170 |     :param use_list: bool. Can be used to change the use_list param of the ``msgpack.loads`` method.
171 |         Default is True.
172 |     """
173 |
174 |     def __init__(self, *args, use_list=True, **kwargs):
175 |         if not msgpack:
176 |             raise RuntimeError("msgpack not installed, MsgPackSerializer unavailable")
177 |         self.use_list = use_list
178 |         super().__init__(*args, **kwargs)
179 |
180 |     def dumps(self, value):
181 |         """
182 |         Serialize the received value using ``msgpack.dumps``.
183 |
184 |         :param value: obj
185 |         :returns: bytes
186 |         """
187 |         return msgpack.dumps(value)
188 |
189 |     def loads(self, value):
190 |         """
191 |         Deserialize value using ``msgpack.loads``.
192 |
193 |         :param value: bytes
194 |         :returns: obj
195 |         """
196 |         raw = self.encoding != "utf-8"
197 |         if value is None:
198 |             return None
199 |         return msgpack.loads(value, raw=raw, use_list=self.use_list)
200 |
--------------------------------------------------------------------------------
/aiocache/backends/valkey.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 | from typing import Optional
4 |
5 | from glide import (
6 |     Batch,
7 |     ConditionalChange,
8 |     ExpirySet,
9 |     ExpiryType,
10 |     GlideClient,
11 |     GlideClientConfiguration,
12 | )
13 | from glide.exceptions import RequestError as IncrbyException
14 |
15 | from aiocache.base import BaseCache
16 | from aiocache.serializers import JsonSerializer
17 |
18 | if sys.version_info >= (3, 11):
19 |     from typing import Self
20 | else:
21 |     from typing import Any as Self
22 |
23 |
24 | logger = logging.getLogger(__name__)
25 |
26 |
27 | class ValkeyCache(BaseCache[str]):
28 |     """
29 |     Valkey cache implementation with the following components as defaults:
30 |         - serializer: :class:`aiocache.serializers.JsonSerializer`
31 |         - plugins: []
32 |
33 |     Config options are:
34 |
35 |     :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
36 |     :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
37 |     :param namespace: string to use as default prefix for the key used in all operations of
38 |         the backend. Default is an empty string, "".
39 |     :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
40 |         By default it's 5.
41 |     :param config: GlideClientConfiguration used to create the glide.GlideClient on entering the async context
42 |     """
43 |
44 |     NAME = "valkey"
45 |
46 |     def __init__(
47 |         self, config: GlideClientConfiguration, **kwargs
48 |     ):
49 |         self.config = config
50 |
51 |         if "serializer" not in kwargs:
52 |             kwargs["serializer"] = JsonSerializer()
53 |         if "key_builder" not in kwargs:
54 |             kwargs["key_builder"] = lambda k, ns: f"{ns}:{k}" if ns else k
55 |
56 |         super().__init__(**kwargs)
57 |
58 |     async def __aenter__(self) -> Self:
59 |         self.client = await GlideClient.create(self.config)
60 |         return self
61 |
62 |     async def __aexit__(self, *args, **kwargs) -> None:
63 |         await self.client.close()
64 |
65 |     async def _get(self, key, encoding="utf-8", _conn=None):
66 |         value = await self.client.get(key)
67 |         if encoding is None or value is None:
68 |             return value
69 |         return value.decode(encoding)
70 |
71 |     _gets = _get
72 |
73 |     async def _multi_get(self, keys, encoding="utf-8", _conn=None):
74 |         values = await self.client.mget(keys)
75 |         if encoding is None:
76 |             return values
77 |         return [v if v is None else v.decode(encoding) for v in values]
78 |
79 |     async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None):
80 |         if isinstance(ttl, float):
81 |             ttl = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000))
82 |         elif ttl:
83 |             ttl = ExpirySet(ExpiryType.SEC, ttl)
84 |
85 |         if _cas_token is not None:
86 |             return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn)
87 |
88 |         return await self.client.set(key, value, expiry=ttl) == "OK"
89 |
90 |     async def _cas(self, key, value, token, ttl=None, _conn=None):
91 |         if await self._get(key) == token:
92 |             return await self.client.set(key, value, expiry=ttl) == "OK"
93 |         return 0
94 |
95 |     async def _multi_set(self, pairs, ttl=None, _conn=None):
96 |         values = dict(pairs)
97 |
98 |         if ttl:
99 |             await self.__multi_set_ttl(values, ttl)
100 |         else:
101 |             await self.client.mset(values)
102 |
103 |         return True
104 |
105 |     async def __multi_set_ttl(self, values, ttl):
106 |         transaction = Batch(is_atomic=True)
107 |         transaction.mset(values)
108 |         ttl, exp = (
109 |             (int(ttl * 1000), transaction.pexpire)
110 |             if isinstance(ttl, float)
111 |             else (ttl, transaction.expire)
112 |         )
113 |         for key in values:
114 |             exp(key, ttl)
115 |         await self.client.exec(transaction, raise_on_error=True)
116 |
117 |     async def _add(self, key, value, ttl=None, _conn=None):
118 |         kwargs = {"conditional_set": ConditionalChange.ONLY_IF_DOES_NOT_EXIST}
119 |         if isinstance(ttl, float):
120 |             kwargs["expiry"] = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000))
121 |         elif ttl:
122 |             kwargs["expiry"] = ExpirySet(ExpiryType.SEC, ttl)
123 |         was_set = await self.client.set(key, value, **kwargs)
124 |         if was_set != "OK":
125 |             raise ValueError(
126 |                 "Key {} already exists, use .set to update the value".format(key)
127 |             )
128 |         return was_set
129 |
130 |     async def _exists(self, key, _conn=None):
131 |         return bool(await self.client.exists([key]))
132 |
133 |     async def _increment(self, key, delta, _conn=None):
134 |         try:
135 |             return await self.client.incrby(key, delta)
136 |         except IncrbyException:
137 |             raise TypeError("Value is not an integer") from None
138 |
139 |     async def _expire(self, key, ttl, _conn=None):
140 |         if ttl == 0:
141 |             return await self.client.persist(key)
142 |         return await self.client.expire(key, ttl)
143 |
144 |     async def _delete(self, key, _conn=None):
145 |         return await self.client.delete([key])
146 |
147 |     async def _clear(self, namespace=None, _conn=None):
148 |         if not namespace:
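            # FLUSHDB wipes the *entire* logical database, not just keys
            # written by this cache instance; pass a namespace to scope it.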
149 | return await self.client.flushdb() 150 | 151 | _, keys = await self.client.scan(b"0", "{}:*".format(namespace)) 152 | if keys: 153 | return bool(await self.client.delete(keys)) 154 | 155 | return True 156 | 157 | async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs): 158 | value = await getattr(self.client, command)(*args, **kwargs) 159 | if encoding is not None: 160 | if command == "get" and value is not None: 161 | value = value.decode(encoding) 162 | return value 163 | 164 | async def _redlock_release(self, key, value): 165 | if await self._get(key) == value: 166 | return await self.client.delete([key]) 167 | return 0 168 | 169 | def build_key(self, key: str, namespace: Optional[str] = None) -> str: 170 | return self._str_build_key(key, namespace) 171 | 172 | @classmethod 173 | def parse_uri_path(cls, path): 174 | """ 175 | Given a uri path, return the Valkey specific configuration 176 | options in that path string according to iana definition 177 | http://www.iana.org/assignments/uri-schemes/prov/redis 178 | 179 | :param path: string containing the path. Example: "/0" 180 | :return: mapping containing the options. Example: {"db": "0"} 181 | """ 182 | options = {} 183 | db, *_ = path[1:].split("/") 184 | if db: 185 | options["db"] = db 186 | return options 187 | 188 | def __repr__(self): # pragma: no cover 189 | return ( 190 | f"ValkeyCache ({self.client.config.addresses[0].host}" 191 | f":{self.client.config.addresses[0].port})" 192 | ) 193 | -------------------------------------------------------------------------------- /tests/acceptance/test_decorators.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | from unittest import mock 4 | 5 | import pytest 6 | 7 | from aiocache import cached, cached_stampede, multi_cached 8 | from ..utils import Keys, ensure_key 9 | 10 | 11 | async def return_dict(keys=None): 12 | ret = {} 13 | for value, key in enumerate(keys or [Keys.KEY, Keys.KEY_1]): 14 | ret[key] = str(value) 15 | return ret 16 | 17 | 18 | async def stub(arg: float, seconds: int = 0) -> str: 19 | await asyncio.sleep(seconds) 20 | return str(random.randint(1, 50)) 21 | 22 | 23 | class TestCached: 24 | async def test_cached_ttl(self, cache): 25 | @cached(cache=cache, ttl=2, key_builder=lambda *args, **kw: Keys.KEY) 26 | async def fn(): 27 | return str(random.randint(1, 50)) 28 | 29 | resp1 = await fn() 30 | resp2 = await fn() 31 | 32 | assert await cache.get(Keys.KEY) == resp1 == resp2 33 | await asyncio.sleep(2.1) 34 | assert await cache.get(Keys.KEY) is None 35 | 36 | async def test_cached_key_builder(self, cache): 37 | def build_key(f, self, a, b): 38 | return "{}_{}_{}_{}".format(self, f.__name__, a, b) 39 | 40 | @cached(cache=cache, key_builder=build_key) 41 | async def fn(self, a, b=2): 42 | return "1" 43 | 44 | await fn("self", 1, 3) 45 | assert await cache.exists(build_key(fn, "self", 1, 3)) is True 46 | 47 | @pytest.mark.parametrize("decorator", (cached, cached_stampede)) 48 | async def test_cached_skip_cache_func(self, cache, decorator): 49 | @decorator(cache=cache, skip_cache_func=lambda r: r is None) 50 | async def sk_func(x): 51 | return x if x > 0 else None 52 | 53 | arg = 1 54 | res = await sk_func(arg) 55 | assert res 56 | 57 | key = decorator(cache=cache).get_cache_key(sk_func, args=(1,), kwargs={}) 58 | 59 | assert key 60 | assert await cache.exists(key) 61 | assert await cache.get(key) == res 62 | 63 | arg = -1 64 | 65 | await sk_func(arg) 66 | 67 | key = 
decorator(cache=cache).get_cache_key(sk_func, args=(-1,), kwargs={}) 68 | 69 | assert key 70 | assert not await cache.exists(key) 71 | 72 | async def test_cached_without_namespace(self, cache): 73 | """Default cache key is created when no namespace is provided""" 74 | cache.namespace = None 75 | 76 | @cached(cache=cache) 77 | async def fn(): 78 | return "1" 79 | 80 | await fn() 81 | decorator = cached(cache=cache) 82 | key = decorator.get_cache_key(fn, args=(), kwargs={}) 83 | assert await cache.exists(key, namespace=None) is True 84 | 85 | async def test_cached_with_namespace(self, cache): 86 | """Cache key is prefixed with provided namespace""" 87 | key_prefix = "test" 88 | cache.namespace = key_prefix 89 | 90 | @cached(cache=cache) 91 | async def ns_fn(): 92 | return "1" 93 | 94 | await ns_fn() 95 | decorator = cached(cache=cache) 96 | key = decorator.get_cache_key(ns_fn, args=(), kwargs={}) 97 | assert await cache.exists(key, namespace=key_prefix) is True 98 | 99 | 100 | class TestCachedStampede: 101 | 102 | async def test_cached_stampede(self, mocker, cache): 103 | mocker.spy(cache, "get") 104 | mocker.spy(cache, "set") 105 | decorator = cached_stampede(cache=cache, ttl=10, lease=3) 106 | 107 | await asyncio.gather(decorator(stub)(0.5), decorator(stub)(0.5)) 108 | 109 | cache.get.assert_called_with("tests.acceptance.test_decoratorsstub(0.5,)[]") 110 | assert cache.get.call_count == 4 111 | cache.set.assert_called_with("tests.acceptance.test_decoratorsstub(0.5,)[]", 112 | mock.ANY, ttl=10) 113 | assert cache.set.call_count == 1, cache.set.call_args_list 114 | 115 | async def test_locking_dogpile_lease_expiration(self, mocker, cache): 116 | mocker.spy(cache, "get") 117 | mocker.spy(cache, "set") 118 | decorator = cached_stampede(cache=cache, ttl=10, lease=3) 119 | 120 | await asyncio.gather( 121 | decorator(stub)(1, seconds=1), 122 | decorator(stub)(1, seconds=2), 123 | decorator(stub)(1, seconds=3), 124 | ) 125 | 126 | assert cache.get.call_count == 6 127 | assert cache.set.call_count == 3 128 | 129 | async def test_locking_dogpile_task_cancellation(self, cache): 130 | @cached_stampede(cache=cache) 131 | async def cancel_task(): 132 | raise asyncio.CancelledError() 133 | 134 | with pytest.raises(asyncio.CancelledError): 135 | await cancel_task() 136 | 137 | 138 | class TestMultiCachedDecorator: 139 | async def test_multi_cached(self, cache): 140 | multi_cached_decorator = multi_cached(cache, keys_from_attr="keys") 141 | 142 | default_keys = {Keys.KEY, Keys.KEY_1} 143 | await multi_cached_decorator(return_dict)(keys=default_keys) 144 | 145 | for key in default_keys: 146 | assert await cache.get(key) is not None 147 | 148 | async def test_keys_without_kwarg(self, cache): 149 | @multi_cached(cache, keys_from_attr="keys") 150 | async def fn(keys): 151 | return {Keys.KEY: 1} 152 | 153 | await fn([Keys.KEY]) 154 | assert await cache.exists(Keys.KEY) is True 155 | 156 | async def test_multi_cached_key_builder(self, cache): 157 | def build_key(key, f, self, keys, market="ES"): 158 | return "{}_{}_{}".format(f.__name__, ensure_key(key), market) 159 | 160 | @multi_cached(keys_from_attr="keys", key_builder=build_key, cache=cache) 161 | async def fn(self, keys, market="ES"): 162 | return {Keys.KEY: 1, Keys.KEY_1: 2} 163 | 164 | await fn("self", keys=[Keys.KEY, Keys.KEY_1]) 165 | assert await cache.exists("fn_" + ensure_key(Keys.KEY) + "_ES") is True 166 | assert await cache.exists("fn_" + ensure_key(Keys.KEY_1) + "_ES") is True 167 | 168 | async def test_multi_cached_skip_keys(self, cache): 169 | 
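        # skip_cache_func for multi_cached receives each (key, value) pair;
        # returning True skips caching that pair (here: don't cache None values).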
        @multi_cached(cache, keys_from_attr="keys", skip_cache_func=lambda _, v: v is None)
170 |         async def multi_sk_fn(keys, values):
171 |             return {k: v for k, v in zip(keys, values)}
172 |
173 |         res = await multi_sk_fn(keys=[Keys.KEY, Keys.KEY_1], values=[42, None])
174 |         assert res
175 |         assert Keys.KEY in res and Keys.KEY_1 in res
176 |
177 |         assert await cache.exists(Keys.KEY)
178 |         assert await cache.get(Keys.KEY) == res[Keys.KEY]
179 |         assert not await cache.exists(Keys.KEY_1)
180 |
181 |     async def test_fn_with_args(self, cache):
182 |         @multi_cached(cache, keys_from_attr="keys")
183 |         async def fn(keys, *args):
184 |             assert len(args) == 1
185 |             return {Keys.KEY: 1}
186 |
187 |         await fn([Keys.KEY], "arg")
188 |         assert await cache.exists(Keys.KEY) is True
189 |
190 |     async def test_double_decorator(self, cache):
191 |         def dummy_d(fn):
192 |             async def wrapper(*args, **kwargs):
193 |                 await fn(*args, **kwargs)
194 |
195 |             return wrapper
196 |
197 |         @dummy_d
198 |         @multi_cached(cache, keys_from_attr="keys")
199 |         async def fn(keys):
200 |             return {Keys.KEY: 1}
201 |
202 |         await fn([Keys.KEY])
203 |         assert await cache.exists(Keys.KEY) is True
204 |
--------------------------------------------------------------------------------
/aiocache/lock.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import uuid
3 | from typing import Any, Dict, Generic, Union
4 |
5 | from aiocache.base import BaseCache, CacheKeyType
6 |
7 |
8 | class RedLock(Generic[CacheKeyType]):
9 |     """
10 |     Implementation of `Redlock <https://redis.io/topics/distlock>`_
11 |     with a single instance because aiocache is focused on
12 |     single-instance caches.
13 |
14 |     This locking has some limitations and shouldn't be used in
15 |     situations where consistency is critical. These locks are aimed at
16 |     performance use cases where failing to lock from time to time
17 |     is acceptable. TLDR: do NOT use this if you need real resource
18 |     exclusion.
19 |
20 |     A couple of considerations with the implementation:
21 |
22 |     - If the lease expires and there are calls waiting, all of them
23 |       will pass (blocking just happens for the first time).
24 |     - When a new call arrives, it will always wait at most the lease
25 |       time. This means that the call could end up blocked longer
26 |       than needed in case the lease from the blocker expires.
27 |
28 |     Backend specific implementation:
29 |
30 |     - Valkey implements the redlock algorithm correctly. It sets
31 |       the key if it doesn't exist. To release, it checks that the value
32 |       is the same as that of the instance trying to release and, if it is,
33 |       it removes the lock. If not, it does nothing.
34 |     - Memcached follows the same approach with one difference. Because
35 |       memcached lacks a way to execute the get and
36 |       delete commands atomically, any client is able to release the
37 |       lock. This is a limitation that can't be fixed without introducing
38 |       race conditions.
39 |     - The memory implementation is not distributed; it only applies
40 |       to the running process. Say you have 4 processes running
41 |       APIs with aiocache: the locking will apply only per process
42 |       (still useful to reduce load per process).
43 |
44 |     Example usage::
45 |
46 |         from aiocache import ValkeyCache
47 |         from aiocache.lock import RedLock
48 |         from glide import GlideClientConfiguration, NodeAddress
49 |
50 |         addresses = [NodeAddress("localhost", 6379)]
51 |         conf = GlideClientConfiguration(addresses=addresses, database_id=0)
52 |         cache = ValkeyCache(conf)
53 |
54 |         async with RedLock(cache, 'key', lease=1):  # Calls will wait here
55 |             result = await cache.get('key')
56 |             if result is not None:
57 |                 return result
58 |             result = await super_expensive_function()
59 |             await cache.set('key', result)
60 |
61 |     In the example, the first call will start computing ``super_expensive_function``
62 |     while consecutive calls will block at most 1 second. If the blocking lasts for
63 |     more than 1 second, the calls will proceed to also calculate the
64 |     result of ``super_expensive_function``.
65 |     """
66 |
67 |     _EVENTS: Dict[str, asyncio.Event] = {}
68 |
69 |     def __init__(
70 |         self, client: BaseCache[CacheKeyType], key: str, lease: Union[int, float]
71 |     ):
72 |         self.client = client
73 |         self.key = self.client.build_key(key + "-lock")
74 |         self.lease = lease
75 |         self._value = ""
76 |
77 |     async def __aenter__(self):
78 |         return await self._acquire()
79 |
80 |     async def _acquire(self):
81 |         self._value = str(uuid.uuid4())
82 |         try:
83 |             await self.client._add(self.key, self._value, ttl=self.lease)
84 |             RedLock._EVENTS[self.key] = asyncio.Event()
85 |         except ValueError:
86 |             await self._wait_for_release()
87 |
88 |     async def _wait_for_release(self):
89 |         try:
90 |             await asyncio.wait_for(RedLock._EVENTS[self.key].wait(), self.lease)
91 |         except asyncio.TimeoutError:
92 |             pass
93 |         except KeyError:  # lock was released when wait_for was rescheduled
94 |             pass
95 |
96 |     async def __aexit__(self, exc_type, exc_value, traceback):
97 |         await self._release()
98 |
99 |     async def _release(self):
100 |         removed = await self.client._redlock_release(self.key, self._value)
101 |         if removed:
102 |             RedLock._EVENTS.pop(self.key).set()
103 |
104 |
105 | class OptimisticLock(Generic[CacheKeyType]):
106 |     """
107 |     Implementation of
108 |     `optimistic lock <https://en.wikipedia.org/wiki/Optimistic_concurrency_control>`_
109 |
110 |     Optimistic locking assumes multiple transactions can happen at the same time
111 |     and they will only fail if, before they finish, conflicting modifications from
112 |     other transactions are found, producing a rollback.
113 |
114 |     Finding a conflict will end up raising an :class:`aiocache.lock.OptimisticLockError`
115 |     exception. A conflict happens when the value at the storage is different from
116 |     the one we retrieved when the lock started.
117 |
118 |     Example usage::

119 |         from aiocache import ValkeyCache
120 |         from glide import GlideClientConfiguration, NodeAddress
121 |
122 |         addresses = [NodeAddress("localhost", 6379)]
123 |         conf = GlideClientConfiguration(addresses=addresses, database_id=0)
124 |         cache = ValkeyCache(conf)
125 |
126 |         # The value stored in 'key' will be checked here
127 |         async with OptimisticLock(cache, 'key') as lock:
128 |             result = await super_expensive_call()
129 |             await lock.cas(result)
130 |
131 |     If any other call sets the value of ``key`` before ``lock.cas`` is called,
132 |     an :class:`aiocache.lock.OptimisticLockError` will be raised.
A way to make
133 |     the same call crash would be to change the value inside the lock like::
134 |
135 |         cache = ValkeyCache(conf)
136 |
137 |         # The value stored in 'key' will be checked here
138 |         async with OptimisticLock(cache, 'key') as lock:
139 |             result = await super_expensive_call()
140 |             await cache.set('key', 'random_value')  # This will make the `lock.cas` call fail
141 |             await lock.cas(result)
142 |
143 |     If the lock is created with a nonexistent key, there will never be conflicts.
144 |     """
145 |
146 |     def __init__(self, client: BaseCache[CacheKeyType], key: str):
147 |         self.client = client
148 |         self.key = key
149 |         self.ns_key = self.client.build_key(key)
150 |         self._token = None
151 |
152 |     async def __aenter__(self):
153 |         return await self._acquire()
154 |
155 |     async def _acquire(self):
156 |         self._token = await self.client._gets(self.ns_key)
157 |         return self
158 |
159 |     async def __aexit__(self, exc_type, exc_value, traceback):
160 |         pass
161 |
162 |     async def cas(self, value: Any, **kwargs: Any) -> bool:
163 |         """
164 |         Checks and sets the specified value for the locked key. If the value has changed
165 |         since the lock was created, it will raise an :class:`aiocache.lock.OptimisticLockError`
166 |         exception.
167 |
168 |         :raises: :class:`aiocache.lock.OptimisticLockError`
169 |         """
170 |         success = await self.client.set(
171 |             self.key, value, _cas_token=self._token, **kwargs
172 |         )
173 |         if not success:
174 |             raise OptimisticLockError("Value has changed since the lock started")
175 |         return True
176 |
177 |
178 | class OptimisticLockError(Exception):
179 |     """
180 |     Raised when a conflict is found during an optimistic lock
181 |     """
182 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS    =
6 | SPHINXBUILD   = sphinx-build
7 | PAPER         =
8 | BUILDDIR      = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4     = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
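# Typical invocation: `make html` renders the docs into $(BUILDDIR)/html.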
16 |
17 | .PHONY: help
18 | help:
19 | 	@echo "Please use \`make <target>' where <target> is one of"
20 | 	@echo "  html       to make standalone HTML files"
21 | 	@echo "  dirhtml    to make HTML files named index.html in directories"
22 | 	@echo "  singlehtml to make a single large HTML file"
23 | 	@echo "  pickle     to make pickle files"
24 | 	@echo "  json       to make JSON files"
25 | 	@echo "  htmlhelp   to make HTML files and an HTML help project"
26 | 	@echo "  qthelp     to make HTML files and a qthelp project"
27 | 	@echo "  applehelp  to make an Apple Help Book"
28 | 	@echo "  devhelp    to make HTML files and a Devhelp project"
29 | 	@echo "  epub       to make an epub"
30 | 	@echo "  epub3      to make an epub3"
31 | 	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
32 | 	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
33 | 	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
34 | 	@echo "  text       to make text files"
35 | 	@echo "  man        to make manual pages"
36 | 	@echo "  texinfo    to make Texinfo files"
37 | 	@echo "  info       to make Texinfo files and run them through makeinfo"
38 | 	@echo "  gettext    to make PO message catalogs"
39 | 	@echo "  changes    to make an overview of all changed/added/deprecated items"
40 | 	@echo "  xml        to make Docutils-native XML files"
41 | 	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
42 | 	@echo "  linkcheck  to check all external links for integrity"
43 | 	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
44 | 	@echo "  coverage   to run coverage check of the documentation (if enabled)"
45 | 	@echo "  dummy      to check syntax errors of document sources"
46 |
47 | .PHONY: clean
48 | clean:
49 | 	rm -rf $(BUILDDIR)/*
50 |
51 | .PHONY: html
52 | html:
53 | 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | 	@echo
55 | 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | .PHONY: dirhtml
58 | dirhtml:
59 | 	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
60 | 	@echo
61 | 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
62 |
63 | .PHONY: singlehtml
64 | singlehtml:
65 | 	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | 	@echo
67 | 	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | .PHONY: pickle
70 | pickle:
71 | 	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
72 | 	@echo
73 | 	@echo "Build finished; now you can process the pickle files."
74 |
75 | .PHONY: json
76 | json:
77 | 	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
78 | 	@echo
79 | 	@echo "Build finished; now you can process the JSON files."
80 |
81 | .PHONY: htmlhelp
82 | htmlhelp:
83 | 	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
84 | 	@echo
85 | 	@echo "Build finished; now you can run HTML Help Workshop with the" \
86 | 	".hhp project file in $(BUILDDIR)/htmlhelp."
87 |
88 | .PHONY: qthelp
89 | qthelp:
90 | 	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
91 | 	@echo
92 | 	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
93 | 	".qhcp project file in $(BUILDDIR)/qthelp, like this:"
94 | 	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiocache.qhcp"
95 | 	@echo "To view the help file:"
96 | 	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiocache.qhc"
97 |
98 | .PHONY: applehelp
99 | applehelp:
100 | 	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
101 | 	@echo
102 | 	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
103 | 	@echo "N.B.
You won't be able to view it unless you put it in" \ 104 | "~/Library/Documentation/Help or install it in your application" \ 105 | "bundle." 106 | 107 | .PHONY: devhelp 108 | devhelp: 109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 110 | @echo 111 | @echo "Build finished." 112 | @echo "To view the help file:" 113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/aiocache" 114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiocache" 115 | @echo "# devhelp" 116 | 117 | .PHONY: epub 118 | epub: 119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 120 | @echo 121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 122 | 123 | .PHONY: epub3 124 | epub3: 125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 126 | @echo 127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 128 | 129 | .PHONY: latex 130 | latex: 131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 132 | @echo 133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 135 | "(use \`make latexpdf' here to do that automatically)." 136 | 137 | .PHONY: latexpdf 138 | latexpdf: 139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 140 | @echo "Running LaTeX files through pdflatex..." 141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 143 | 144 | .PHONY: latexpdfja 145 | latexpdfja: 146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 147 | @echo "Running LaTeX files through platex and dvipdfmx..." 148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 150 | 151 | .PHONY: text 152 | text: 153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 154 | @echo 155 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 156 | 157 | .PHONY: man 158 | man: 159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 160 | @echo 161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 162 | 163 | .PHONY: texinfo 164 | texinfo: 165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 166 | @echo 167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 168 | @echo "Run \`make' in that directory to run these through makeinfo" \ 169 | "(use \`make info' here to do that automatically)." 170 | 171 | .PHONY: info 172 | info: 173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 174 | @echo "Running Texinfo files through makeinfo..." 175 | make -C $(BUILDDIR)/texinfo info 176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 177 | 178 | .PHONY: gettext 179 | gettext: 180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 181 | @echo 182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 183 | 184 | .PHONY: changes 185 | changes: 186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 187 | @echo 188 | @echo "The overview file is in $(BUILDDIR)/changes." 189 | 190 | .PHONY: linkcheck 191 | linkcheck: 192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 193 | @echo 194 | @echo "Link check complete; look for any errors in the above output " \ 195 | "or in $(BUILDDIR)/linkcheck/output.txt." 
196 |
197 | .PHONY: doctest
198 | doctest:
199 | 	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
200 | 	@echo "Testing of doctests in the sources finished, look at the " \
201 | 	"results in $(BUILDDIR)/doctest/output.txt."
202 |
203 | .PHONY: coverage
204 | coverage:
205 | 	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
206 | 	@echo "Testing of coverage in the sources finished, look at the " \
207 | 	"results in $(BUILDDIR)/coverage/python.txt."
208 |
209 | .PHONY: xml
210 | xml:
211 | 	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
212 | 	@echo
213 | 	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
214 |
215 | .PHONY: pseudoxml
216 | pseudoxml:
217 | 	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
218 | 	@echo
219 | 	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
220 |
221 | .PHONY: dummy
222 | dummy:
223 | 	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
224 | 	@echo
225 | 	@echo "Build finished. Dummy builder generates no files."
226 |
--------------------------------------------------------------------------------
/tests/acceptance/test_base.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import pytest
4 |
5 | from aiocache.backends.memory import SimpleMemoryCache
6 | from aiocache.base import _Conn
7 | from aiocache.serializers import NullSerializer
8 | from ..utils import Keys
9 |
10 |
11 | class TestCache:
12 |     """
13 |     This class ensures that all caches behave the same way and have the minimum functionality.
14 |     To add a new cache, just create the fixture for the new cache and add its id as a param
15 |     for the cache fixture.
16 |     """
17 |
18 |     async def test_setup(self, cache):
19 |         assert cache.namespace == "test"
20 |
21 |     async def test_get_missing(self, cache):
22 |         assert await cache.get(Keys.KEY) is None
23 |         assert await cache.get(Keys.KEY, default=1) == 1
24 |
25 |     async def test_get_existing(self, cache):
26 |         await cache.set(Keys.KEY, "value")
27 |         assert await cache.get(Keys.KEY) == "value"
28 |
29 |     async def test_multi_get(self, cache):
30 |         await cache.set(Keys.KEY, "value")
31 |         assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == ["value", None]
32 |
33 |     async def test_delete_missing(self, cache):
34 |         result = await cache.delete(Keys.KEY)
35 |         assert result == 0
36 |
37 |     async def test_delete_existing(self, cache):
38 |         await cache.set(Keys.KEY, "value")
39 |         result = await cache.delete(Keys.KEY)
40 |         assert result == 1
41 |
42 |         value = await cache.get(Keys.KEY)
43 |         assert value is None
44 |
45 |     async def test_set(self, cache):
46 |         assert await cache.set(Keys.KEY, "value") is True
47 |
48 |     async def test_set_cancel_previous_ttl_handle(self, cache):
49 |         await cache.set(Keys.KEY, "value", ttl=4)
50 |
51 |         await asyncio.sleep(2.1)
52 |         # Smaller ttl seems flaky, as if this call takes >0.5s...
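        # so the test uses a generous 4s TTL with ~2s sleeps to stay deterministic.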
53 | result = await cache.get(Keys.KEY) 54 | assert result == "value" 55 | await cache.set(Keys.KEY, "new_value", ttl=4) 56 | 57 | await asyncio.sleep(2) 58 | result = await cache.get(Keys.KEY) 59 | assert result == "new_value" 60 | 61 | async def test_multi_set(self, cache): 62 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 63 | assert await cache.multi_set(pairs) is True 64 | assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == ["value", "random_value"] 65 | 66 | async def test_multi_set_with_ttl(self, cache): 67 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 68 | assert await cache.multi_set(pairs, ttl=1) is True 69 | await asyncio.sleep(1.1) 70 | 71 | assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] 72 | 73 | async def test_set_with_ttl(self, cache): 74 | await cache.set(Keys.KEY, "value", ttl=1) 75 | await asyncio.sleep(1.1) 76 | 77 | assert await cache.get(Keys.KEY) is None 78 | 79 | async def test_add_missing(self, cache): 80 | assert await cache.add(Keys.KEY, "value", ttl=1) is True 81 | 82 | async def test_add_existing(self, cache): 83 | assert await cache.set(Keys.KEY, "value") is True 84 | with pytest.raises(ValueError): 85 | await cache.add(Keys.KEY, "value") 86 | 87 | async def test_exists_missing(self, cache): 88 | assert await cache.exists(Keys.KEY) is False 89 | 90 | async def test_exists_existing(self, cache): 91 | await cache.set(Keys.KEY, "value") 92 | assert await cache.exists(Keys.KEY) is True 93 | 94 | async def test_increment_missing(self, cache): 95 | assert await cache.increment(Keys.KEY, delta=2) == 2 96 | assert await cache.increment(Keys.KEY_1, delta=-2) == -2 97 | 98 | async def test_increment_existing(self, cache): 99 | await cache.set(Keys.KEY, 2) 100 | assert await cache.increment(Keys.KEY, delta=2) == 4 101 | assert await cache.increment(Keys.KEY, delta=1) == 5 102 | assert await cache.increment(Keys.KEY, delta=-3) == 2 103 | 104 | async def test_increment_typeerror(self, cache): 105 | await cache.set(Keys.KEY, "value") 106 | with pytest.raises(TypeError): 107 | assert await cache.increment(Keys.KEY) 108 | 109 | async def test_expire_existing(self, cache): 110 | await cache.set(Keys.KEY, "value") 111 | assert await cache.expire(Keys.KEY, 1) is True 112 | await asyncio.sleep(1.1) 113 | assert await cache.exists(Keys.KEY) is False 114 | 115 | async def test_expire_with_0(self, cache): 116 | await cache.set(Keys.KEY, "value", 1) 117 | assert await cache.expire(Keys.KEY, 0) is True 118 | await asyncio.sleep(1.1) 119 | assert await cache.exists(Keys.KEY) is True 120 | 121 | async def test_expire_missing(self, cache): 122 | assert await cache.expire(Keys.KEY, 1) is False 123 | 124 | async def test_clear(self, cache): 125 | await cache.set(Keys.KEY, "value") 126 | await cache.clear() 127 | 128 | assert await cache.exists(Keys.KEY) is False 129 | 130 | async def test_close_pool_only_clears_resources(self, cache): 131 | await cache.set(Keys.KEY, "value") 132 | await cache.close() 133 | assert await cache.set(Keys.KEY, "value") is True 134 | assert await cache.get(Keys.KEY) == "value" 135 | 136 | async def test_single_connection(self, cache): 137 | async with cache.get_connection() as conn: 138 | assert isinstance(conn, _Conn) 139 | assert await conn.set(Keys.KEY, "value") is True 140 | assert await conn.get(Keys.KEY) == "value" 141 | 142 | 143 | class TestMemoryCache: 144 | async def test_accept_explicit_args(self): 145 | with pytest.raises(TypeError): 146 | SimpleMemoryCache(random_attr="wtf") 147 | 148 | async 
def test_set_float_ttl(self, memory_cache): 149 | await memory_cache.set(Keys.KEY, "value", ttl=0.1) 150 | await asyncio.sleep(0.15) 151 | 152 | assert await memory_cache.get(Keys.KEY) is None 153 | 154 | async def test_multi_set_float_ttl(self, memory_cache): 155 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 156 | assert await memory_cache.multi_set(pairs, ttl=0.1) is True 157 | await asyncio.sleep(0.15) 158 | 159 | assert await memory_cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] 160 | 161 | async def test_raw(self, memory_cache): 162 | await memory_cache.raw("setdefault", "key", "value") 163 | assert await memory_cache.raw("get", "key") == "value" 164 | assert list(await memory_cache.raw("keys")) == ["key"] 165 | 166 | async def test_clear_with_namespace_memory(self, memory_cache): 167 | await memory_cache.set(Keys.KEY, "value", namespace="test") 168 | await memory_cache.clear(namespace="test") 169 | 170 | assert await memory_cache.exists(Keys.KEY, namespace="test") is False 171 | 172 | 173 | @pytest.mark.memcached 174 | class TestMemcachedCache: 175 | async def test_accept_explicit_args(self): 176 | from aiocache.backends.memcached import MemcachedCache 177 | 178 | with pytest.raises(TypeError): 179 | MemcachedCache(random_attr="wtf") 180 | 181 | async def test_set_too_long_key(self, memcached_cache): 182 | with pytest.raises(TypeError) as exc_info: 183 | await memcached_cache.set("a" * 2000, "value") 184 | assert str(exc_info.value).startswith("aiomcache error: invalid key") 185 | 186 | async def test_set_float_ttl_fails(self, memcached_cache): 187 | with pytest.raises(TypeError) as exc_info: 188 | await memcached_cache.set(Keys.KEY, "value", ttl=0.1) 189 | assert str(exc_info.value) == "aiomcache error: exptime not int: 0.1" 190 | 191 | async def test_multi_set_float_ttl(self, memcached_cache): 192 | with pytest.raises(TypeError) as exc_info: 193 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 194 | assert await memcached_cache.multi_set(pairs, ttl=0.1) is True 195 | assert str(exc_info.value) == "aiomcache error: exptime not int: 0.1" 196 | 197 | async def test_raw(self, memcached_cache): 198 | await memcached_cache.raw("set", b"key", b"value") 199 | assert await memcached_cache.raw("get", b"key") == "value" 200 | assert await memcached_cache.raw("prepend", b"key", b"super") is True 201 | assert await memcached_cache.raw("get", b"key") == "supervalue" 202 | 203 | async def test_clear_with_namespace_memcached(self, memcached_cache): 204 | await memcached_cache.set(Keys.KEY, "value", namespace="test") 205 | 206 | with pytest.raises(ValueError): 207 | await memcached_cache.clear(namespace="test") 208 | 209 | assert await memcached_cache.exists(Keys.KEY, namespace="test") is True 210 | 211 | async def test_close(self, memcached_cache): 212 | await memcached_cache.set(Keys.KEY, "value") 213 | await memcached_cache._close() 214 | assert memcached_cache.client._pool._pool.qsize() == 0 215 | 216 | 217 | @pytest.mark.valkey 218 | class TestValkeyCache: 219 | async def test_accept_explicit_args(self): 220 | from aiocache.backends.valkey import ValkeyCache 221 | 222 | with pytest.raises(TypeError): 223 | ValkeyCache(random_attr="wtf") 224 | 225 | async def test_float_ttl(self, valkey_cache): 226 | await valkey_cache.set(Keys.KEY, "value", ttl=0.1) 227 | await asyncio.sleep(0.15) 228 | 229 | assert await valkey_cache.get(Keys.KEY) is None 230 | 231 | async def test_multi_set_float_ttl(self, valkey_cache): 232 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, 
"random_value"]] 233 | assert await valkey_cache.multi_set(pairs, ttl=0.1) is True 234 | await asyncio.sleep(0.15) 235 | 236 | assert await valkey_cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] 237 | 238 | async def test_raw(self, valkey_cache): 239 | await valkey_cache.raw("set", "key", "value") 240 | assert await valkey_cache.raw("get", "key") == "value" 241 | assert await valkey_cache.raw("scan", b"0", "k*") == [b"0", [b"key"]] 242 | # .raw() doesn't build key with namespace prefix, clear it manually 243 | await valkey_cache.raw("delete", "key") 244 | 245 | async def test_raw_no_encoding(self, valkey_config): 246 | from aiocache.backends.valkey import ValkeyCache 247 | 248 | serializer = NullSerializer(encoding=None) 249 | async with ValkeyCache(valkey_config, namespace="test", serializer=serializer) as cache: 250 | await cache.set(Keys.KEY, "value") 251 | 252 | assert await cache.raw("get", Keys.KEY) == b"value" 253 | 254 | await cache.delete(Keys.KEY) 255 | 256 | async def test_clear_with_namespace_valkey(self, valkey_cache): 257 | await valkey_cache.set(Keys.KEY, "value", namespace="test") 258 | await valkey_cache.clear(namespace="test") 259 | 260 | assert await valkey_cache.exists(Keys.KEY, namespace="test") is False 261 | 262 | async def test_close(self, valkey_cache): 263 | await valkey_cache.set(Keys.KEY, "value") 264 | await valkey_cache._close() 265 | -------------------------------------------------------------------------------- /tests/acceptance/test_lock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from aiocache.lock import OptimisticLock, OptimisticLockError, RedLock 6 | from aiocache.serializers import StringSerializer 7 | from ..utils import KEY_LOCK, Keys 8 | 9 | 10 | @pytest.fixture 11 | def lock(cache): 12 | return RedLock(cache, Keys.KEY, 20) 13 | 14 | 15 | def build_key(key, namespace=None): 16 | return "custom_key" 17 | 18 | 19 | def build_key_bytes(key, namespace=None): 20 | return b"custom_key" 21 | 22 | 23 | @pytest.fixture 24 | def custom_valkey_cache(mocker, valkey_cache, build_key=build_key): 25 | mocker.patch.object(valkey_cache, "build_key", new=build_key) 26 | yield valkey_cache 27 | 28 | 29 | @pytest.fixture 30 | def custom_memory_cache(mocker, memory_cache, build_key=build_key): 31 | mocker.patch.object(memory_cache, "build_key", new=build_key) 32 | yield memory_cache 33 | 34 | 35 | @pytest.fixture 36 | def custom_memcached_cache(mocker, memcached_cache, build_key=build_key_bytes): 37 | mocker.patch.object(memcached_cache, "build_key", new=build_key) 38 | yield memcached_cache 39 | 40 | 41 | class TestRedLock: 42 | async def test_acquire(self, cache, lock): 43 | cache.serializer = StringSerializer() 44 | async with lock: 45 | assert await cache.get(KEY_LOCK) == lock._value 46 | 47 | async def test_release_does_nothing_when_no_lock(self, lock): 48 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 49 | 50 | async def test_acquire_release(self, cache, lock): 51 | async with lock: 52 | pass 53 | assert await cache.get(KEY_LOCK) is None 54 | 55 | async def test_locking_dogpile(self, mocker, cache): 56 | mocker.spy(cache, "get") 57 | mocker.spy(cache, "set") 58 | mocker.spy(cache, "_add") 59 | 60 | async def dummy(): 61 | res = await cache.get(Keys.KEY) 62 | assert res is None 63 | 64 | async with RedLock(cache, Keys.KEY, lease=5): 65 | res = await cache.get(Keys.KEY) 66 | if res is not None: 67 | return 68 | await asyncio.sleep(0.1) 69 | 
await cache.set(Keys.KEY, "value") 70 | 71 | await asyncio.gather(dummy(), dummy(), dummy(), dummy()) 72 | assert cache._add.call_count == 4 73 | assert cache.get.call_count == 8 74 | assert cache.set.call_count == 1, cache.set.call_args_list 75 | 76 | async def test_locking_dogpile_lease_expiration(self, cache): 77 | async def dummy() -> None: 78 | res = await cache.get(Keys.KEY) 79 | assert res is None 80 | 81 | # Lease should expire before cache is set, so res is still None. 82 | async with RedLock(cache, Keys.KEY, lease=1): 83 | res = await cache.get(Keys.KEY) 84 | assert res is None 85 | await asyncio.sleep(1.1) 86 | await cache.set(Keys.KEY, "value") 87 | 88 | await asyncio.gather(dummy(), dummy(), dummy(), dummy()) 89 | 90 | async def test_locking_dogpile_propagates_exceptions(self, cache): 91 | async def dummy(): 92 | async with RedLock(cache, Keys.KEY, lease=1): 93 | raise ValueError() 94 | 95 | with pytest.raises(ValueError): 96 | await dummy() 97 | 98 | 99 | class TestMemoryRedLock: 100 | @pytest.fixture 101 | def lock(self, memory_cache): 102 | return RedLock(memory_cache, Keys.KEY, 20) 103 | 104 | async def test_acquire_key_builder(self, custom_memory_cache, lock): 105 | async with lock: 106 | assert await custom_memory_cache.get(KEY_LOCK) == lock._value 107 | 108 | async def test_acquire_release_key_builder(self, custom_memory_cache, lock): 109 | async with lock: 110 | assert await custom_memory_cache.get(KEY_LOCK) is not None 111 | assert await custom_memory_cache.get(KEY_LOCK) is None 112 | 113 | async def test_release_wrong_token_fails(self, lock): 114 | await lock.__aenter__() 115 | lock._value = "random" 116 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 117 | 118 | async def test_release_wrong_client_fails(self, memory_cache, lock): 119 | wrong_lock = RedLock(memory_cache, Keys.KEY, 20) 120 | await lock.__aenter__() 121 | assert await wrong_lock.__aexit__("exc_type", "exc_value", "traceback") is None 122 | 123 | async def test_float_lease(self, memory_cache): 124 | lock = RedLock(memory_cache, Keys.KEY, 0.1) 125 | await lock.__aenter__() 126 | await asyncio.sleep(0.2) 127 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 128 | 129 | 130 | @pytest.mark.valkey 131 | class TestValkeyRedLock: 132 | @pytest.fixture 133 | def lock(self, valkey_cache): 134 | return RedLock(valkey_cache, Keys.KEY, 20) 135 | 136 | async def test_acquire_key_builder(self, custom_valkey_cache, lock): 137 | custom_valkey_cache.serializer = StringSerializer() 138 | async with lock: 139 | assert await custom_valkey_cache.get(KEY_LOCK) == lock._value 140 | 141 | async def test_acquire_release_key_builder(self, custom_valkey_cache, lock): 142 | custom_valkey_cache.serializer = StringSerializer() 143 | async with lock: 144 | assert await custom_valkey_cache.get(KEY_LOCK) is not None 145 | assert await custom_valkey_cache.get(KEY_LOCK) is None 146 | 147 | async def test_release_wrong_token_fails(self, lock): 148 | await lock.__aenter__() 149 | lock._value = "random" 150 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 151 | 152 | async def test_release_wrong_client_fails(self, valkey_cache, lock): 153 | wrong_lock = RedLock(valkey_cache, Keys.KEY, 20) 154 | await lock.__aenter__() 155 | assert await wrong_lock.__aexit__("exc_type", "exc_value", "traceback") is None 156 | 157 | async def test_float_lease(self, valkey_cache): 158 | lock = RedLock(valkey_cache, Keys.KEY, 0.1) 159 | await lock.__aenter__() 160 | await 
asyncio.sleep(0.2) 161 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 162 | 163 | 164 | @pytest.mark.memcached 165 | class TestMemcachedRedLock: 166 | @pytest.fixture 167 | def lock(self, memcached_cache): 168 | return RedLock(memcached_cache, Keys.KEY, 20) 169 | 170 | async def test_acquire_key_builder(self, custom_memcached_cache, lock): 171 | custom_memcached_cache.serializer = StringSerializer() 172 | async with lock: 173 | assert await custom_memcached_cache.get(KEY_LOCK) == lock._value 174 | 175 | async def test_acquire_release_key_builder(self, custom_memcached_cache, lock): 176 | custom_memcached_cache.serializer = StringSerializer() 177 | async with lock: 178 | assert await custom_memcached_cache.get(KEY_LOCK) is not None 179 | assert await custom_memcached_cache.get(KEY_LOCK) is None 180 | 181 | async def test_release_wrong_token_succeeds_meh(self, lock): 182 | await lock.__aenter__() 183 | lock._value = "random" 184 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 185 | 186 | async def test_release_wrong_client_succeeds_meh(self, memcached_cache, lock): 187 | wrong_lock = RedLock(memcached_cache, Keys.KEY, 20) 188 | await lock.__aenter__() 189 | assert await wrong_lock.__aexit__("exc_type", "exc_value", "traceback") is None 190 | 191 | async def test_float_lease(self, memcached_cache): 192 | lock = RedLock(memcached_cache, Keys.KEY, 0.1) 193 | with pytest.raises(TypeError): 194 | await lock.__aenter__() 195 | 196 | 197 | class TestOptimisticLock: 198 | @pytest.fixture 199 | def lock(self, cache): 200 | return OptimisticLock(cache, Keys.KEY) 201 | 202 | async def test_acquire(self, cache, lock): 203 | await cache.set(Keys.KEY, "value") 204 | async with lock: 205 | assert lock._token == await cache._gets(cache.build_key(Keys.KEY)) 206 | 207 | async def test_release_does_nothing(self, lock): 208 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 209 | 210 | async def test_check_and_set_not_existing_never_fails(self, cache, lock): 211 | async with lock as locked: 212 | await cache.set(Keys.KEY, "conflicting_value") 213 | await locked.cas("value") 214 | 215 | assert await cache.get(Keys.KEY) == "value" 216 | 217 | async def test_check_and_set(self, cache, lock): 218 | await cache.set(Keys.KEY, "previous_value") 219 | async with lock as locked: 220 | await locked.cas("value") 221 | 222 | assert await cache.get(Keys.KEY) == "value" 223 | 224 | async def test_check_and_set_fail(self, cache, lock): 225 | await cache.set(Keys.KEY, "previous_value") 226 | with pytest.raises(OptimisticLockError): 227 | async with lock as locked: 228 | await cache.set(Keys.KEY, "conflicting_value") 229 | await locked.cas("value") 230 | 231 | async def test_check_and_set_with_int_ttl(self, cache, lock): 232 | await cache.set(Keys.KEY, "previous_value") 233 | async with lock as locked: 234 | await locked.cas("value", ttl=1) 235 | 236 | await asyncio.sleep(1) 237 | assert await cache.get(Keys.KEY) is None 238 | 239 | 240 | class TestMemoryOptimisticLock: 241 | @pytest.fixture 242 | def lock(self, memory_cache): 243 | return OptimisticLock(memory_cache, Keys.KEY) 244 | 245 | async def test_acquire_key_builder(self, custom_memory_cache, lock): 246 | await custom_memory_cache.set(Keys.KEY, "value") 247 | async with lock: 248 | assert await custom_memory_cache.get(KEY_LOCK) == lock._token 249 | await custom_memory_cache.delete(Keys.KEY, "value") 250 | 251 | async def test_check_and_set_with_float_ttl(self, memory_cache, lock): 252 | 
await memory_cache.set(Keys.KEY, "previous_value") 253 | async with lock as locked: 254 | await locked.cas("value", ttl=0.1) 255 | 256 | await asyncio.sleep(1) 257 | assert await memory_cache.get(Keys.KEY) is None 258 | 259 | 260 | @pytest.mark.valkey 261 | class TestValkeyOptimisticLock: 262 | @pytest.fixture 263 | def lock(self, valkey_cache): 264 | return OptimisticLock(valkey_cache, Keys.KEY) 265 | 266 | async def test_acquire_key_builder(self, custom_valkey_cache, lock): 267 | custom_valkey_cache.serializer = StringSerializer() 268 | await custom_valkey_cache.set(Keys.KEY, "value") 269 | async with lock: 270 | assert await custom_valkey_cache.get(KEY_LOCK) == lock._token 271 | await custom_valkey_cache.delete(Keys.KEY, "value") 272 | 273 | async def test_check_and_set_with_float_ttl(self, valkey_cache, lock): 274 | await valkey_cache.set(Keys.KEY, "previous_value") 275 | async with lock as locked: 276 | await locked.cas("value", ttl=0.1) 277 | 278 | await asyncio.sleep(1) 279 | assert await valkey_cache.get(Keys.KEY) is None 280 | -------------------------------------------------------------------------------- /.gitchangelog.rc: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8; mode: python -*- 2 | ## 3 | ## Format 4 | ## 5 | ## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...] 6 | ## 7 | ## Description 8 | ## 9 | ## ACTION is one of 'chg', 'fix', 'new' 10 | ## 11 | ## It states WHAT the change is about. 12 | ## 13 | ## 'chg' is for refactors, small improvements, cosmetic changes... 14 | ## 'fix' is for bug fixes 15 | ## 'new' is for new features, big improvements 16 | ## 17 | ## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc' 18 | ## 19 | ## It states WHO is concerned by the change. 20 | ## 21 | ## 'dev' is for developers (API changes, refactors...) 22 | ## 'usr' is for final users (UI changes) 23 | ## 'pkg' is for packagers (packaging changes) 24 | ## 'test' is for testers (test only related changes) 25 | ## 'doc' is for doc writers (doc only changes) 26 | ## 27 | ## COMMIT_MSG is ... well ... the commit message itself. 28 | ## 29 | ## TAGs are additional adjectives, such as 'refactor' 'minor' 'cosmetic' 30 | ## 31 | ## They are preceded with a '!' or a '@' (prefer the former, as the 32 | ## latter is wrongly interpreted in GitHub.) Commonly used tags are: 33 | ## 34 | ## 'refactor' is obviously for refactoring code only 35 | ## 'minor' is for a trivial change (a typo, adding a comment) 36 | ## 'cosmetic' is for cosmetic-driven changes (re-indentation, 80-col...) 37 | ## 'wip' is for partial functionality but complete subfunctionality. 38 | ## 39 | ## Example: 40 | ## 41 | ## new: usr: support of bazaar implemented 42 | ## chg: re-indented some lines !cosmetic 43 | ## new: dev: updated code to be compatible with last version of killer lib. 44 | ## fix: pkg: updated year of licence coverage. 45 | ## new: test: added a bunch of tests around user usability of feature X. 46 | ## fix: typo in spelling my name in comment. !minor 47 | ## 48 | ## Please note that multi-line commit messages are supported, and only the 49 | ## first line will be considered as the "summary" of the commit message. So 50 | ## tags and other rules only apply to the summary. The body of the commit 51 | ## message will be displayed in the changelog without reformatting.
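##
## To make the summary rule concrete, here is a sketch; the commit below is
## invented for illustration and is not taken from this repository's history:
##
##     fix: usr: handle float ttl in multi_set (#42) !minor
##
##     A body of any length can follow the summary. Only the first line is
##     parsed for ACTION, AUDIENCE and TAGs; the body is carried into the
##     changelog verbatim.
##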
52 | 53 | 54 | ## 55 | ## ``ignore_regexps`` is a list of regexps 56 | ## 57 | ## Any commit having its full commit message matching any regexp listed here 58 | ## will be ignored and won't be reported in the changelog. 59 | ## 60 | ignore_regexps = [ 61 | r':minor', 62 | r':cosmetic', 63 | r':refactor', 64 | r':docs', 65 | r':wip', 66 | r'Bump version|bump version|version bump', 67 | r'README', 68 | r'Update [\w-]+ from [0-9.]+ to [0-9]+', 69 | r'Pin [\w-]+ to latest version', 70 | r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:', 71 | r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:', 72 | r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$', 73 | r'^$', ## ignore commits with empty messages 74 | ] 75 | 76 | 77 | ## ``section_regexps`` is a list of 2-tuples associating a string label and a 78 | ## list of regexps 79 | ## 80 | ## Commit messages will be classified in sections thanks to this. Section 81 | ## titles are the labels, and a commit is classified under a section if any 82 | ## of its associated regexps matches. 83 | ## 84 | ## Please note that ``section_regexps`` will only classify commits and won't 85 | ## make any changes to the contents. So you'll probably want to go check 86 | ## ``subject_process`` (or ``body_process``) to make some changes to the subject, 87 | ## whenever you are tweaking this variable. 88 | ## 89 | section_regexps = [ 90 | ('New', [ 91 | r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 92 | ]), 93 | ('Changes', [ 94 | r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 95 | ]), 96 | ('Fix', [ 97 | r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 98 | ]), 99 | 100 | ('Other', None ## Match all lines 101 | ), 102 | 103 | ] 104 | 105 | 106 | ## ``body_process`` is a callable 107 | ## 108 | ## This callable will be given the original body and the result will 109 | ## be used in the changelog. 110 | ## 111 | ## Available constructs are: 112 | ## 113 | ## - any Python callable that takes one txt argument and returns a txt argument. 114 | ## 115 | ## - ReSub(pattern, replacement): will apply regexp substitution. 116 | ## 117 | ## - Indent(chars=" "): will indent the text with the prefix 118 | ## Please remember that template engines also get to modify the text and 119 | ## will usually indent the text themselves if needed. 120 | ## 121 | ## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraphs to fill 80 columns 122 | ## 123 | ## - noop: do nothing 124 | ## 125 | ## - ucfirst: ensure the first letter is uppercase. 126 | ## (usually used in the ``subject_process`` pipeline) 127 | ## 128 | ## - final_dot: ensure the text finishes with a dot 129 | ## (usually used in the ``subject_process`` pipeline) 130 | ## 131 | ## - strip: remove any spaces before or after the content of the string 132 | ## 133 | ## - SetIfEmpty(msg="No commit message."): will set the text to 134 | ## the given ``msg`` if the current text is empty. 135 | ## 136 | ## Additionally, you can `pipe` the provided filters, for instance: 137 | body_process = Wrap(regexp=r'\n(?=\w+\s*:)') 138 | # body_process = ReSub(r'.*', r'') | strip 139 | 140 | 141 | ## ``subject_process`` is a callable 142 | ## 143 | ## This callable will be given the original subject and the result will 144 | ## be used in the changelog. 145 | ## 146 | ## Available constructs are those listed in the ``body_process`` doc.
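##
## As a worked illustration of the active ``subject_process`` pipeline
## defined below (the subject is a made-up example, not a real commit),
## the subject
##
##     fix: handle float ttl in multi_set (#123)
##
## would come out as
##
##     Handle float ttl in multi_set [#123](https://github.com/argaen/aiocache/issues/123).
##
## i.e. the issue reference is linkified, the ACTION prefix is stripped,
## the first letter is uppercased and a final dot is appended.
##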
147 | ## subject_process = (strip | 148 | ## ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') | 149 | ## SetIfEmpty("No commit message.") | ucfirst | final_dot) 150 | subject_process = (strip | 151 | ReSub(r'\(#([0-9]+)\)', r'[#\1](https://github.com/argaen/aiocache/issues/\1)') | 152 | ReSub(r'^[nN]ew\s*:\s*|^[cC]hg\s*:\s*|^[fF]ix\s*:\s*', r'') | 153 | ucfirst | final_dot) 154 | 155 | 156 | ## ``tag_filter_regexp`` is a regexp 157 | ## 158 | ## Tags that will be used for the changelog must match this regexp. 159 | ## 160 | tag_filter_regexp = r'^[0-9]+\.[0-9]+(\.[0-9]+)?$' 161 | 162 | 163 | ## ``unreleased_version_label`` is a string or a callable that outputs a string 164 | ## 165 | ## This label will be used as the changelog title of the last set of changes 166 | ## between the last valid tag and HEAD, if any. 167 | unreleased_version_label = "Unreleased" 168 | 169 | 170 | ## ``output_engine`` is a callable 171 | ## 172 | ## This will change the output format of the generated changelog file 173 | ## 174 | ## Available choices are: 175 | ## 176 | ## - rest_py 177 | ## 178 | ## Legacy pure python engine, outputs ReSTructured text. 179 | ## This is the default. 180 | ## 181 | ## - mustache(<template_name>) 182 | ## 183 | ## The template name can be any of the available templates in 184 | ## ``templates/mustache/*.tpl``. 185 | ## Requires the python package ``pystache``. 186 | ## Examples: 187 | ## - mustache("markdown") 188 | ## - mustache("restructuredtext") 189 | ## 190 | ## - makotemplate(<template_name>) 191 | ## 192 | ## The template name can be any of the available templates in 193 | ## ``templates/mako/*.tpl``. 194 | ## Requires the python package ``mako``. 195 | ## Examples: 196 | ## - makotemplate("restructuredtext") 197 | ## 198 | output_engine = mustache(".release_notes.tpl") 199 | #output_engine = mustache("restructuredtext") 200 | #output_engine = mustache("markdown") 201 | #output_engine = makotemplate("restructuredtext") 202 | 203 | 204 | ## ``include_merge`` is a boolean 205 | ## 206 | ## This option tells git-log whether to include merge commits in the log. 207 | ## The default is to include them. 208 | include_merge = True 209 | 210 | 211 | ## ``log_encoding`` is a string identifier 212 | ## 213 | ## This option tells gitchangelog what encoding is output by ``git log``. 214 | ## The default is to be clever about it: it checks ``git config`` for 215 | ## ``i18n.logOutputEncoding``, and if not found will default to git's own 216 | ## default: ``utf-8``. 217 | #log_encoding = 'utf-8' 218 | 219 | 220 | ## ``publish`` is a callable 221 | ## 222 | ## Sets what ``gitchangelog`` should do with the output generated by 223 | ## the output engine. ``publish`` is a callable taking one argument 224 | ## that is an iterator over lines from the output engine. 225 | ## 226 | ## Some helper callables are provided: 227 | ## 228 | ## Available choices are: 229 | ## 230 | ## - stdout 231 | ## 232 | ## Outputs directly to standard output 233 | ## (This is the default) 234 | ## 235 | ## - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start()) 236 | ## 237 | ## Creates a callable that will parse the given file for the given 238 | ## regex pattern and will insert the output in the file. 239 | ## ``idx`` is a callable that receives the match object and 240 | ## must return an integer index indicating where to insert 241 | ## the output in the file. The default is to return the position of 242 | ## the start of the matched string.
243 | ## 244 | ## - FileRegexSubst(file, pattern, replace, flags) 245 | ## 246 | ## Applies a replacement in place in the given file. Your regex pattern must 247 | ## take care of everything and might be more complex. Check the README 248 | ## for a complete copy-pastable example. 249 | ## 250 | # publish = FileInsertAtFirstRegexMatch( 251 | # "CHANGELOG.rst", 252 | # r'Changelog\n=.+\n\n()', 253 | # idx=lambda m: m.start(1) 254 | # ) 255 | publish = stdout 256 | 257 | 258 | ## ``revs`` is a list of callables or a list of strings 259 | ## 260 | ## Callables will be called to resolve as strings, allowing dynamic 261 | ## computation of these. The result will be used as revisions for 262 | ## gitchangelog (as if directly stated on the command line). This allows 263 | ## filtering exactly which commits will be read by gitchangelog. 264 | ## 265 | ## For full documentation on the format of these strings, please 266 | ## refer to the ``git rev-list`` arguments. There are many examples. 267 | ## 268 | ## Using callables is especially useful, for instance, if you 269 | ## are using gitchangelog to generate your changelog incrementally. 270 | ## 271 | ## Some helpers are provided; you can use them:: 272 | ## 273 | ## - FileFirstRegexMatch(file, pattern): will return a callable that will 274 | ## return the first string match for the given pattern in the given file. 275 | ## If you use named sub-patterns in your regex pattern, it'll output only 276 | ## the string matched by the sub-pattern named "rev". 277 | ## 278 | ## - Caret(rev): will return the rev prefixed by a "^", which is a 279 | ## way to remove the given revision and all its ancestors. 280 | ## 281 | ## Please note that if you provide a rev-list on the command line, it'll 282 | ## replace this value (which will then be ignored). 283 | ## 284 | ## If empty, then ``gitchangelog`` will act as if it had to generate a full 285 | ## changelog. 286 | ## 287 | ## The default is to use all commits to make the changelog. 288 | #revs = ["^1.0.3", ] 289 | #revs = [ 290 | # Caret( 291 | # FileFirstRegexMatch( 292 | # "CHANGELOG.rst", 293 | # r"(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")), 294 | # "HEAD" 295 | #] 296 | revs = [] 297 | -------------------------------------------------------------------------------- /tests/ut/backends/test_valkey.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from unittest.mock import AsyncMock, patch 3 | 4 | import pytest 5 | from glide import Batch, ConditionalChange, ExpirySet, ExpiryType 6 | from glide.exceptions import RequestError 7 | 8 | from aiocache.backends.valkey import ValkeyCache 9 | from aiocache.base import BaseCache 10 | from aiocache.serializers import JsonSerializer, PickleSerializer 11 | from ...utils import Keys, ensure_key 12 | 13 | 14 | @pytest.fixture 15 | async def valkey(valkey_config): 16 | async with ValkeyCache(config=valkey_config) as valkey: 17 | with patch.object(valkey, "client", autospec=True) as m: 18 | # These methods actually return an awaitable.
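# A plain MagicMock attribute is not awaitable (awaiting it raises
# TypeError), so each coroutine method on the mocked client is swapped
# for an AsyncMock below.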
19 | for method in ( 20 | "eval", 21 | "expire", 22 | "get", 23 | "execute_command", 24 | "exists", 25 | "incrby", 26 | "persist", 27 | "delete", 28 | "scan", 29 | "flushdb", 30 | ): 31 | setattr(m, method, AsyncMock(return_value=None, spec_set=())) 32 | m.mget = AsyncMock(return_value=[None], spec_set=()) 33 | m.set = AsyncMock(return_value="OK", spec_set=()) 34 | yield valkey 35 | 36 | 37 | class TestValkeyCache: 38 | @pytest.fixture 39 | def set_test_namespace(self, valkey_cache): 40 | valkey_cache.namespace = "test" 41 | yield 42 | valkey_cache.namespace = None 43 | 44 | async def test_get(self, valkey): 45 | valkey.client.get.return_value = b"value" 46 | assert await valkey._get(Keys.KEY) == "value" 47 | valkey.client.get.assert_called_with(Keys.KEY) 48 | 49 | async def test_gets(self, valkey): 50 | await valkey._gets(Keys.KEY) 51 | valkey.client.get.assert_called_with(Keys.KEY) 52 | 53 | async def test_set(self, valkey): 54 | await valkey._set(Keys.KEY, "value") 55 | valkey.client.set.assert_called_with(Keys.KEY, "value", expiry=None) 56 | 57 | await valkey._set(Keys.KEY, "value", ttl=1) 58 | valkey.client.set.assert_called_with( 59 | Keys.KEY, "value", expiry=ExpirySet(ExpiryType.SEC, 1) 60 | ) 61 | 62 | async def test_set_cas_token(self, mocker, valkey): 63 | mocker.patch.object(valkey, "_cas") 64 | await valkey._set( 65 | Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client 66 | ) 67 | valkey._cas.assert_called_with( 68 | Keys.KEY, "value", "old_value", ttl=None, _conn=valkey.client 69 | ) 70 | 71 | async def test_set_cas_token_ttl(self, mocker, valkey): 72 | mocker.patch.object(valkey, "_cas") 73 | await valkey._set( 74 | Keys.KEY, "value", ttl=1, _cas_token="old_value", _conn=valkey.client 75 | ) 76 | valkey._cas.assert_called_with( 77 | Keys.KEY, 78 | "value", 79 | "old_value", 80 | ttl=ExpirySet(ExpiryType.SEC, 1), 81 | _conn=valkey.client, 82 | ) 83 | 84 | async def test_set_cas_token_float_ttl(self, mocker, valkey): 85 | mocker.patch.object(valkey, "_cas") 86 | await valkey._set( 87 | Keys.KEY, "value", ttl=1.1, _cas_token="old_value", _conn=valkey.client 88 | ) 89 | valkey._cas.assert_called_with( 90 | Keys.KEY, 91 | "value", 92 | "old_value", 93 | ttl=ExpirySet(ExpiryType.MILLSEC, 1100), 94 | _conn=valkey.client, 95 | ) 96 | 97 | async def test_cas(self, mocker, valkey): 98 | mocker.spy(valkey, "_get") 99 | mocker.spy(valkey, "_cas") 100 | await valkey._cas(Keys.KEY, "value", "old_value", ttl=10, _conn=valkey.client) 101 | valkey._get.assert_called_with(Keys.KEY) 102 | assert valkey._cas.spy_return == 0 103 | 104 | async def test_cas_float_ttl(self, mocker, valkey): 105 | spy = mocker.spy(valkey, "_get") 106 | await valkey._cas(Keys.KEY, "value", "old_value", ttl=0.1, _conn=valkey.client) 107 | spy.assert_called_with(Keys.KEY) 108 | mocker.stop(spy) 109 | mock = mocker.patch.object(valkey, "_get", return_value="old_value") 110 | await valkey._cas(Keys.KEY, "value", "old_value", ttl=0.1, _conn=valkey.client) 111 | mock.assert_called_once() 112 | valkey.client.set.assert_called_with(Keys.KEY, "value", expiry=0.1) 113 | 114 | async def test_multi_get(self, valkey): 115 | await valkey._multi_get([Keys.KEY, Keys.KEY_1]) 116 | valkey.client.mget.assert_called_with([Keys.KEY, Keys.KEY_1]) 117 | 118 | async def test_multi_set(self, valkey): 119 | await valkey._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")]) 120 | valkey.client.mset.assert_called_with({Keys.KEY: "value", Keys.KEY_1: "random"}) 121 | 122 | async def test_multi_set_with_ttl(self, valkey, mocker): 
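# With a ttl, _multi_set batches an mset plus one expire per key and runs
# them via client.exec, as the assertions below verify; Batch is patched
# at the class level because the Batch instance is created inside
# _multi_set itself.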
123 | mock_mset = mocker.patch.object(Batch, "mset") 124 | mock_expire = mocker.patch.object(Batch, "expire") 125 | await valkey._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")], ttl=1) 126 | 127 | valkey.client.exec.assert_called() 128 | 129 | assert mock_mset.call_count == 1 130 | assert mock_expire.call_count == 2 131 | mock_expire.assert_any_call(Keys.KEY, 1) 132 | mock_expire.assert_any_call(Keys.KEY_1, 1) 133 | 134 | async def test_add(self, valkey): 135 | await valkey._add(Keys.KEY, "value") 136 | valkey.client.set.assert_called_with( 137 | Keys.KEY, "value", conditional_set=ConditionalChange.ONLY_IF_DOES_NOT_EXIST 138 | ) 139 | 140 | await valkey._add(Keys.KEY, "value", 1) 141 | valkey.client.set.assert_called_with( 142 | Keys.KEY, 143 | "value", 144 | conditional_set=ConditionalChange.ONLY_IF_DOES_NOT_EXIST, 145 | expiry=ExpirySet(ExpiryType.SEC, 1), 146 | ) 147 | 148 | async def test_add_existing(self, valkey): 149 | valkey.client.set.return_value = False 150 | with pytest.raises(ValueError): 151 | await valkey._add(Keys.KEY, "value") 152 | 153 | async def test_add_float_ttl(self, valkey): 154 | await valkey._add(Keys.KEY, "value", 0.1) 155 | assert valkey.client.set.call_args.args[0] == Keys.KEY 156 | assert ( 157 | valkey.client.set.call_args.kwargs["conditional_set"] 158 | == ConditionalChange.ONLY_IF_DOES_NOT_EXIST 159 | ) 160 | assert ( 161 | valkey.client.set.call_args.kwargs["expiry"].get_cmd_args() 162 | == ExpirySet(ExpiryType.MILLSEC, 100).get_cmd_args() 163 | ) 164 | 165 | async def test_exists(self, valkey): 166 | valkey.client.exists.return_value = 1 167 | await valkey._exists(Keys.KEY) 168 | valkey.client.exists.assert_called_with([Keys.KEY]) 169 | 170 | async def test_increment(self, valkey): 171 | await valkey._increment(Keys.KEY, delta=2) 172 | valkey.client.incrby.assert_called_with(Keys.KEY, 2) 173 | 174 | async def test_increment_typerror(self, valkey): 175 | valkey.client.incrby.side_effect = RequestError("msg") 176 | with pytest.raises(TypeError): 177 | await valkey._increment(Keys.KEY, delta=2) 178 | valkey.client.incrby.assert_called_with(Keys.KEY, 2) 179 | 180 | async def test_expire(self, valkey): 181 | await valkey._expire(Keys.KEY, 1) 182 | valkey.client.expire.assert_called_with(Keys.KEY, 1) 183 | await valkey._increment(Keys.KEY, 2) 184 | 185 | async def test_expire_0_ttl(self, valkey): 186 | await valkey._expire(Keys.KEY, ttl=0) 187 | valkey.client.persist.assert_called_with(Keys.KEY) 188 | 189 | async def test_delete(self, valkey): 190 | await valkey._delete(Keys.KEY) 191 | valkey.client.delete.assert_called_with([Keys.KEY]) 192 | 193 | async def test_clear(self, valkey): 194 | valkey.client.scan.return_value = [b"0", ["nm:a", "nm:b"]] 195 | await valkey._clear("nm") 196 | valkey.client.delete.assert_called_with(["nm:a", "nm:b"]) 197 | 198 | async def test_clear_no_keys(self, valkey): 199 | valkey.client.scan.return_value = [b"0", []] 200 | await valkey._clear("nm") 201 | valkey.client.delete.assert_not_called() 202 | 203 | async def test_clear_no_namespace(self, valkey): 204 | await valkey._clear() 205 | assert valkey.client.flushdb.call_count == 1 206 | 207 | async def test_redlock_release(self, mocker, valkey): 208 | mocker.patch.object(valkey, "_get", return_value="random") 209 | await valkey._redlock_release(Keys.KEY, "random") 210 | valkey._get.assert_called_once_with(Keys.KEY) 211 | valkey.client.delete.assert_called_once_with([Keys.KEY]) 212 | 213 | def test_name(self): 214 | assert ValkeyCache.NAME == "valkey" 215 | 216 | def 
test_inheritance(self, valkey_config): 217 | assert isinstance(ValkeyCache(config=valkey_config), BaseCache) 218 | 219 | def test_default_serializer(self, valkey_config): 220 | assert isinstance(ValkeyCache(config=valkey_config).serializer, JsonSerializer) 221 | 222 | @pytest.mark.parametrize( 223 | "path,expected", 224 | (("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})), 225 | ) 226 | def test_parse_uri_path(self, path, expected, valkey_config): 227 | assert ValkeyCache(config=valkey_config).parse_uri_path(path) == expected 228 | 229 | @pytest.mark.parametrize( 230 | "namespace, expected", 231 | ( 232 | (None, "test:" + ensure_key(Keys.KEY)), 233 | ("", ensure_key(Keys.KEY)), 234 | ("my_ns", "my_ns:" + ensure_key(Keys.KEY)), 235 | ), 236 | ) 237 | def test_build_key_double_dot( 238 | self, set_test_namespace, valkey_cache, namespace, expected 239 | ): 240 | assert valkey_cache.build_key(Keys.KEY, namespace) == expected 241 | 242 | def test_build_key_no_namespace(self, valkey_cache): 243 | assert valkey_cache.build_key(Keys.KEY, namespace=None) == Keys.KEY 244 | 245 | async def test_custom_serializer(self, valkey_config): 246 | value = {"one": {"nested": "1"}, "two": 2} 247 | serialized = pickle.dumps(value) 248 | 249 | async with ValkeyCache(config=valkey_config, serializer=PickleSerializer()) as vc: 250 | assert isinstance(vc.serializer, PickleSerializer) 251 | await vc.set(Keys.KEY, value) 252 | assert await vc.get(Keys.KEY) == pickle.loads(serialized) 253 | 254 | async def test_default_key_builder(self, valkey_config): 255 | # use .value in this test. see: https://github.com/python/cpython/issues/100458 256 | async with ValkeyCache(config=valkey_config) as default: 257 | await default.set(Keys.KEY.value, "value", namespace="namespace") 258 | assert await default.client.exists(["namespace:key"]) 259 | 260 | async def test_custom_key_builder(self, valkey_config): 261 | async with ValkeyCache( 262 | config=valkey_config, key_builder=lambda k, ns: f"{ns}__{k}" if ns else k 263 | ) as vc: 264 | await vc.set(Keys.KEY.value, "value", namespace="namespace") 265 | assert await vc.get(Keys.KEY.value, namespace="namespace") == "value" 266 | assert await vc.client.exists(["namespace__key"]) 267 | 268 | async def test_raw(self, valkey_config): 269 | async with ValkeyCache(config=valkey_config) as cache: 270 | set_v = await cache.raw("set", Keys.KEY, "Any") 271 | get_v = await cache.raw("get", Keys.KEY) 272 | assert set_v == "OK" 273 | assert get_v == "Any" 274 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # aiocache documentation build configuration file, created by 5 | # sphinx-quickstart on Sat Oct 1 16:53:45 2016. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
19 | # 20 | import re 21 | import os 22 | import sys 23 | from pathlib import Path 24 | 25 | sys.path.insert(0, os.path.abspath("..")) 26 | sys.path.insert(0, os.path.abspath(".")) 27 | 28 | # -- General configuration ------------------------------------------------ 29 | 30 | # If your documentation needs a minimal Sphinx version, state it here. 31 | # 32 | # needs_sphinx = '1.0' 33 | 34 | # Add any Sphinx extension module names here, as strings. They can be 35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 36 | # ones. 37 | extensions = [ 38 | "sphinx.ext.autodoc", 39 | "sphinx.ext.viewcode", 40 | ] 41 | 42 | # Add any paths that contain templates here, relative to this directory. 43 | templates_path = ["_templates"] 44 | 45 | # The suffix(es) of source filenames. 46 | # You can specify multiple suffixes as a list of strings: 47 | # 48 | # source_suffix = ['.rst', '.md'] 49 | source_suffix = ".rst" 50 | 51 | # The encoding of source files. 52 | # 53 | # source_encoding = 'utf-8-sig' 54 | 55 | # The master toctree document. 56 | master_doc = "index" 57 | 58 | # General information about the project. 59 | project = "aiocache" 60 | copyright = "2016, Manuel Miranda" 61 | author = "Manuel Miranda" 62 | 63 | # The version info for the project you're documenting; acts as a replacement for 64 | # |version| and |release|, and is also used in various other places throughout the 65 | # built documents. 66 | # 67 | 68 | _path = Path(__file__).parent.parent / "aiocache/__init__.py" 69 | try: 70 | version = re.findall(r'__version__ = "(.+?)"', _path.read_text())[0] 71 | release = version 72 | except IndexError: 73 | raise RuntimeError("Unable to determine version.") 74 | 75 | # The language for content autogenerated by Sphinx. Refer to the documentation 76 | # for a list of supported languages. 77 | # 78 | # This is also used if you do content translation via gettext catalogs. 79 | # Usually you set "language" from the command line for these cases. 80 | language = None 81 | 82 | # There are two options for replacing |today|: either you set today to some 83 | # non-false value, then it is used: 84 | # 85 | # today = '' 86 | # 87 | # Else, today_fmt is used as the format for a strftime call. 88 | # 89 | # today_fmt = '%B %d, %Y' 90 | 91 | # List of patterns, relative to source directory, that match files and 92 | # directories to ignore when looking for source files. 93 | # These patterns also affect html_static_path and html_extra_path 94 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 95 | 96 | # The reST default role (used for this markup: `text`) to use for all 97 | # documents. 98 | # 99 | # default_role = None 100 | 101 | # If true, '()' will be appended to :func: etc. cross-reference text. 102 | # 103 | # add_function_parentheses = True 104 | 105 | # If true, the current module name will be prepended to all description 106 | # unit titles (such as .. function::). 107 | # 108 | # add_module_names = True 109 | 110 | # If true, sectionauthor and moduleauthor directives will be shown in the 111 | # output. They are ignored by default. 112 | # 113 | # show_authors = False 114 | 115 | # The name of the Pygments (syntax highlighting) style to use. 116 | pygments_style = "sphinx" 117 | 118 | # A list of ignored prefixes for module index sorting. 119 | # modindex_common_prefix = [] 120 | 121 | # If true, keep warnings as "system message" paragraphs in the built documents.
122 | # keep_warnings = False 123 | 124 | # If true, `todo` and `todoList` produce output, else they produce nothing. 125 | todo_include_todos = False 126 | 127 | 128 | # -- Options for HTML output ---------------------------------------------- 129 | 130 | # The theme to use for HTML and HTML Help pages. See the documentation for 131 | # a list of builtin themes. 132 | # 133 | html_theme = "default" 134 | on_rtd = os.environ.get("READTHEDOCS", None) == "True" 135 | if not on_rtd: 136 | import sphinx_rtd_theme 137 | html_theme = "sphinx_rtd_theme" 138 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 139 | 140 | # Theme options are theme-specific and customize the look and feel of a theme 141 | # further. For a list of options available for each theme, see the 142 | # documentation. 143 | # 144 | # html_theme_options = {} 145 | 146 | # Add any paths that contain custom themes here, relative to this directory. 147 | # html_theme_path = [] 148 | 149 | # The name for this set of Sphinx documents. 150 | # "<project> v<release> documentation" by default. 151 | # 152 | # html_title = 'aiocache v0.0.1' 153 | 154 | # A shorter title for the navigation bar. Default is the same as html_title. 155 | # 156 | # html_short_title = None 157 | 158 | # The name of an image file (relative to this directory) to place at the top 159 | # of the sidebar. 160 | # 161 | # html_logo = None 162 | 163 | # The name of an image file (relative to this directory) to use as a favicon of 164 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 165 | # pixels large. 166 | # 167 | # html_favicon = None 168 | 169 | # Add any paths that contain custom static files (such as style sheets) here, 170 | # relative to this directory. They are copied after the builtin static files, 171 | # so a file named "default.css" will overwrite the builtin "default.css". 172 | html_static_path = ["_static"] 173 | 174 | # Add any extra paths that contain custom files (such as robots.txt or 175 | # .htaccess) here, relative to this directory. These files are copied 176 | # directly to the root of the documentation. 177 | # 178 | # html_extra_path = [] 179 | 180 | # If not None, a 'Last updated on:' timestamp is inserted at every page 181 | # bottom, using the given strftime format. 182 | # The empty string is equivalent to '%b %d, %Y'. 183 | # 184 | # html_last_updated_fmt = None 185 | 186 | # If true, SmartyPants will be used to convert quotes and dashes to 187 | # typographically correct entities. 188 | # 189 | # html_use_smartypants = True 190 | 191 | # Custom sidebar templates, maps document names to template names. 192 | # 193 | # html_sidebars = {} 194 | 195 | # Additional templates that should be rendered to pages, maps page names to 196 | # template names. 197 | # 198 | # html_additional_pages = {} 199 | 200 | # If false, no module index is generated. 201 | # 202 | # html_domain_indices = True 203 | 204 | # If false, no index is generated. 205 | # 206 | # html_use_index = True 207 | 208 | # If true, the index is split into individual pages for each letter. 209 | # 210 | # html_split_index = False 211 | 212 | # If true, links to the reST sources are added to the pages. 213 | # 214 | # html_show_sourcelink = True 215 | 216 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 217 | # 218 | # html_show_sphinx = True 219 | 220 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
221 | # 222 | # html_show_copyright = True 223 | 224 | # If true, an OpenSearch description file will be output, and all pages will 225 | # contain a <link> tag referring to it. The value of this option must be the 226 | # base URL from which the finished HTML is served. 227 | # 228 | # html_use_opensearch = '' 229 | 230 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 231 | # html_file_suffix = None 232 | 233 | # Language to be used for generating the HTML full-text search index. 234 | # Sphinx supports the following languages: 235 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 236 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' 237 | # 238 | # html_search_language = 'en' 239 | 240 | # A dictionary with options for the search language support, empty by default. 241 | # 'ja' uses this config value. 242 | # 'zh' users can customize the `jieba` dictionary path. 243 | # 244 | # html_search_options = {'type': 'default'} 245 | 246 | # The name of a javascript file (relative to the configuration directory) that 247 | # implements a search results scorer. If empty, the default will be used. 248 | # 249 | # html_search_scorer = 'scorer.js' 250 | 251 | # Output file base name for HTML help builder. 252 | htmlhelp_basename = "aiocachedoc" 253 | 254 | # -- Options for LaTeX output --------------------------------------------- 255 | 256 | latex_elements = { 257 | # The paper size ('letterpaper' or 'a4paper'). 258 | # 259 | # 'papersize': 'letterpaper', 260 | 261 | # The font size ('10pt', '11pt' or '12pt'). 262 | # 263 | # 'pointsize': '10pt', 264 | 265 | # Additional stuff for the LaTeX preamble. 266 | # 267 | # 'preamble': '', 268 | 269 | # Latex figure (float) alignment 270 | # 271 | # 'figure_align': 'htbp', 272 | } 273 | 274 | # Grouping the document tree into LaTeX files. List of tuples 275 | # (source start file, target name, title, 276 | # author, documentclass [howto, manual, or own class]). 277 | latex_documents = [ 278 | (master_doc, "aiocache.tex", "aiocache Documentation", "Manuel Miranda", "manual"), 279 | ] 280 | 281 | # The name of an image file (relative to this directory) to place at the top of 282 | # the title page. 283 | # 284 | # latex_logo = None 285 | 286 | # For "manual" documents, if this is true, then toplevel headings are parts, 287 | # not chapters. 288 | # 289 | # latex_use_parts = False 290 | 291 | # If true, show page references after internal links. 292 | # 293 | # latex_show_pagerefs = False 294 | 295 | # If true, show URL addresses after external links. 296 | # 297 | # latex_show_urls = False 298 | 299 | # Documents to append as an appendix to all manuals. 300 | # 301 | # latex_appendices = [] 302 | 303 | # If false, will not define \strong, \code, \titleref, \crossref ... but only 304 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clashes with user-added 305 | # packages. 306 | # 307 | # latex_keep_old_macro_names = True 308 | 309 | # If false, no module index is generated. 310 | # 311 | # latex_domain_indices = True 312 | 313 | 314 | # -- Options for manual page output --------------------------------------- 315 | 316 | # One entry per manual page. List of tuples 317 | # (source start file, name, description, authors, manual section). 318 | man_pages = [(master_doc, "aiocache", "aiocache Documentation", [author], 1)] 319 | 320 | # If true, show URL addresses after external links.
321 | # 322 | # man_show_urls = False 323 | 324 | 325 | # -- Options for Texinfo output ------------------------------------------- 326 | 327 | # Grouping the document tree into Texinfo files. List of tuples 328 | # (source start file, target name, title, author, 329 | # dir menu entry, description, category) 330 | texinfo_documents = [ 331 | ( 332 | master_doc, 333 | "aiocache", 334 | "aiocache Documentation", 335 | author, 336 | "aiocache", 337 | "One line description of project.", 338 | "Miscellaneous" 339 | ), 340 | ] 341 | 342 | # Documents to append as an appendix to all manuals. 343 | # 344 | # texinfo_appendices = [] 345 | 346 | # If false, no module index is generated. 347 | # 348 | # texinfo_domain_indices = True 349 | 350 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 351 | # 352 | # texinfo_show_urls = 'footnote' 353 | 354 | # If true, do not generate a @detailmenu in the "Top" node's menu. 355 | # 356 | # texinfo_no_detailmenu = False 357 | --------------------------------------------------------------------------------