├── .codacy.yml
├── .codecov.yml
├── .coveragerc
├── .flake8
├── .gitchangelog.rc
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── auto-merge.yml
│       ├── ci.yml
│       └── codeql.yml
├── .gitignore
├── .mypy.ini
├── .pre-commit-config.yaml
├── .readthedocs.yml
├── .release_notes.tpl
├── CHANGES.rst
├── CONTRIBUTING.rst
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.rst
├── aiocache
│   ├── __init__.py
│   ├── backends
│   │   ├── __init__.py
│   │   ├── memcached.py
│   │   ├── memory.py
│   │   └── redis.py
│   ├── base.py
│   ├── decorators.py
│   ├── exceptions.py
│   ├── lock.py
│   ├── plugins.py
│   ├── py.typed
│   └── serializers
│       ├── __init__.py
│       └── serializers.py
├── docker-compose.yml
├── docs
│   ├── Makefile
│   ├── caches.rst
│   ├── conf.py
│   ├── decorators.rst
│   ├── images
│   │   ├── architecture.png
│   │   └── set_operation_flow.png
│   ├── index.rst
│   ├── locking.rst
│   ├── plugins.rst
│   ├── readthedocs.yml
│   ├── serializers.rst
│   ├── testing.rst
│   └── v1_migration.rst
├── examples
│   ├── alt_key_builder.py
│   ├── cached_decorator.py
│   ├── frameworks
│   │   ├── aiohttp_example.py
│   │   ├── sanic_example.py
│   │   └── tornado_example.py
│   ├── marshmallow_serializer_class.py
│   ├── multicached_decorator.py
│   ├── optimistic_lock.py
│   ├── plugins.py
│   ├── python_object.py
│   ├── redlock.py
│   ├── run_all.sh
│   ├── serializer_class.py
│   ├── serializer_function.py
│   ├── simple_redis.py
│   └── testing.py
├── pyproject.toml
├── requirements-dev.txt
├── requirements.txt
├── scripts
│   └── make_release
├── setup.cfg
├── setup.py
└── tests
    ├── __init__.py
    ├── acceptance
    │   ├── __init__.py
    │   ├── conftest.py
    │   ├── test_base.py
    │   ├── test_decorators.py
    │   ├── test_lock.py
    │   ├── test_plugins.py
    │   └── test_serializers.py
    ├── conftest.py
    ├── performance
    │   ├── __init__.py
    │   ├── conftest.py
    │   ├── server.py
    │   ├── test_concurrency.py
    │   └── test_footprint.py
    ├── ut
    │   ├── __init__.py
    │   ├── backends
    │   │   ├── __init__.py
    │   │   ├── test_memcached.py
    │   │   ├── test_memory.py
    │   │   └── test_redis.py
    │   ├── conftest.py
    │   ├── test_base.py
    │   ├── test_decorators.py
    │   ├── test_exceptions.py
    │   ├── test_lock.py
    │   ├── test_plugins.py
    │   └── test_serializers.py
    └── utils.py

/.codacy.yml:
--------------------------------------------------------------------------------
exclude_paths:
  - tests/**
  - examples/**

--------------------------------------------------------------------------------
/.codecov.yml:
--------------------------------------------------------------------------------
codecov:
  notify:
    after_n_builds: 4

coverage:
  status:
    patch: no
    changes: no

comment:
  layout: "reach, diff, flags, files, footer"
  behavior: default
  require_changes: yes

--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
[run]
branch = True
concurrency = multiprocessing

--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
[flake8]
enable-extensions = G
max-doc-length = 90
max-line-length = 90
select = A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,B901,B902,B903,B950
# E226: Missing whitespace around arithmetic operators can help group things together.
# E501,W505: Superseded by B950 (from Bugbear)
# E722: Superseded by B001 (from Bugbear)
# W503: Mutually exclusive with W504.
ignore = E226,E501,E722,W503,W505
per-file-ignores =
    # S*: Bandit security checks not useful in tests.
    tests/*:S

# flake8-import-order
application-import-names = aiocache
import-order-style = pycharm

# flake8-quotes
inline-quotes = "
# flake8-requirements
requirements-file = requirements-dev.txt

--------------------------------------------------------------------------------
/.gitchangelog.rc:
--------------------------------------------------------------------------------
# -*- coding: utf-8; mode: python -*-
##
## Format
##
##   ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...]
##
## Description
##
##   ACTION is one of 'chg', 'fix', 'new'
##
##       Is WHAT the change is about.
##
##       'chg' is for refactor, small improvement, cosmetic changes...
##       'fix' is for bug fixes
##       'new' is for new features, big improvement
##
##   AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc'
##
##       Is WHO is concerned by the change.
##
##       'dev'  is for developers (API changes, refactors...)
##       'usr'  is for final users (UI changes)
##       'pkg'  is for packagers (packaging changes)
##       'test' is for testers (test only related changes)
##       'doc'  is for doc guys (doc only changes)
##
##   COMMIT_MSG is ... well ... the commit message itself.
##
##   TAGs are additional adjectives such as 'refactor', 'minor', 'cosmetic'
##
##       They are preceded with a '!' or a '@' (prefer the former, as the
##       latter is wrongly interpreted in github.) Commonly used tags are:
##
##       'refactor' is obviously for refactoring code only
##       'minor' is for a very meaningless change (a typo, adding a comment)
##       'cosmetic' is for cosmetic driven change (re-indentation, 80-col...)
##       'wip' is for partial functionality but complete subfunctionality.
##
## Example:
##
##   new: usr: support of bazaar implemented
##   chg: re-indented some lines !cosmetic
##   new: dev: updated code to be compatible with last version of killer lib.
##   fix: pkg: updated year of licence coverage.
##   new: test: added a bunch of tests around user usability of feature X.
##   fix: typo in spelling my name in comment. !minor
##
## Please note that multi-line commit messages are supported, and only the
## first line will be considered as the "summary" of the commit message. So
## tags and other rules only apply to the summary. The body of the commit
## message will be displayed in the changelog without reformatting.


##
## ``ignore_regexps`` is a list of regexps
##
## Any commit having its full commit message matching any regexp listed here
## will be ignored and won't be reported in the changelog.
##
ignore_regexps = [
    r':minor',
    r':cosmetic',
    r':refactor',
    r':docs',
    r':wip',
    r'Bump version|bump version|version bump',
    r'README',
    r'Update [\w-]+ from [0-9.]+ to [0-9]+',
    r'Pin [\w-]+ to latest version',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:',
    r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:',
    r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$',
    r'^$',  ## ignore commits with empty messages
]


## ``section_regexps`` is a list of 2-tuples associating a string label and a
## list of regexps
##
## Commit messages will be classified in sections thanks to this. Section
## titles are the label, and a commit is classified under this section if any
## of the regexps associated is matching.
##
## Please note that ``section_regexps`` will only classify commits and won't
## make any changes to the contents. So you'll probably want to go check
## ``subject_process`` (or ``body_process``) to do some changes to the subject,
## whenever you are tweaking this variable.
##
section_regexps = [
    ('New', [
        r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
    ]),
    ('Changes', [
        r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
    ]),
    ('Fix', [
        r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
    ]),

    ('Other', None  ## Match all lines
    ),

]


## ``body_process`` is a callable
##
## This callable will be given the original body and its result will
## be used in the changelog.
##
## Available constructs are:
##
##   - any python callable that takes one txt argument and returns a txt argument.
##
##   - ReSub(pattern, replacement): will apply regexp substitution.
##
##   - Indent(chars="  "): will indent the text with the prefix.
##     Please remember that template engines also get to modify the text and
##     will usually indent the text themselves if needed.
##
##   - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraphs to fill 80-Columns
##
##   - noop: do nothing
##
##   - ucfirst: ensure the first letter is uppercase.
##     (usually used in the ``subject_process`` pipeline)
##
##   - final_dot: ensure text finishes with a dot
##     (usually used in the ``subject_process`` pipeline)
##
##   - strip: remove any spaces before or after the content of the string
##
##   - SetIfEmpty(msg="No commit message."): will set the text to
##     whatever given ``msg`` if the current text is empty.
##
## Additionally, you can `pipe` the provided filters, for instance:
body_process = Wrap(regexp=r'\n(?=\w+\s*:)')
# body_process = ReSub(r'.*', r'') | strip


## ``subject_process`` is a callable
##
## This callable will be given the original subject and its result will
## be used in the changelog.
##
## Available constructs are those listed in ``body_process`` doc.
## subject_process = (strip |
##     ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') |
##     SetIfEmpty("No commit message.") | ucfirst | final_dot)
subject_process = (strip |
    ReSub(r'\(#([0-9]+)\)', r'[#\1](https://github.com/argaen/aiocache/issues/\1)') |
    ReSub(r'^[nN]ew\s*:\s*|^[cC]hg\s*:\s*|^[fF]ix\s*:\s*', r'') |
    ucfirst | final_dot)


## ``tag_filter_regexp`` is a regexp
##
## Tags that will be used for the changelog must match this regexp.
##
tag_filter_regexp = r'^[0-9]+\.[0-9]+(\.[0-9]+)?$'


## ``unreleased_version_label`` is a string or a callable that outputs a string
##
## This label will be used as the changelog Title of the last set of changes
## between the last valid tag and HEAD, if any.
unreleased_version_label = "Unreleased"


## ``output_engine`` is a callable
##
## This will change the output format of the generated changelog file
##
## Available choices are:
##
##   - rest_py
##
##        Legacy pure python engine, outputs ReSTructured text.
##        This is the default.
##
##   - mustache(<template_name>)
##
##        Template name could be any of the available templates in
##        ``templates/mustache/*.tpl``.
##        Requires python package ``pystache``.
##        Examples:
##           - mustache("markdown")
##           - mustache("restructuredtext")
##
##   - makotemplate(<template_name>)
##
##        Template name could be any of the available templates in
##        ``templates/mako/*.tpl``.
##        Requires python package ``mako``.
##        Examples:
##           - makotemplate("restructuredtext")
##
output_engine = mustache(".release_notes.tpl")
#output_engine = mustache("restructuredtext")
#output_engine = mustache("markdown")
#output_engine = makotemplate("restructuredtext")


## ``include_merge`` is a boolean
##
## This option tells git-log whether to include merge commits in the log.
## The default is to include them.
include_merge = True


## ``log_encoding`` is a string identifier
##
## This option tells gitchangelog what encoding is output by ``git log``.
## The default is to be clever about it: it checks ``git config`` for
## ``i18n.logOutputEncoding``, and if not found will default to git's own
## default: ``utf-8``.
#log_encoding = 'utf-8'


## ``publish`` is a callable
##
## Sets what ``gitchangelog`` should do with the output generated by
## the output engine. ``publish`` is a callable taking one argument
## that is an iterator on lines from the output engine.
##
## Some helper callables are provided:
##
## Available choices are:
##
##   - stdout
##
##        Outputs directly to standard output
##        (This is the default)
##
##   - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start())
##
##        Creates a callable that will parse the given file for the given
##        regex pattern and will insert the output in the file.
##        ``idx`` is a callable that receives the matching object and
##        must return an integer index point where to insert the
##        output in the file. Default is to return the position of
##        the start of the matched string.
##
##   - FileRegexSubst(file, pattern, replace, flags)
##
##        Apply a replace inplace in the given file. Your regex pattern must
##        take care of everything and might be more complex. Check the README
##        for a complete copy-pastable example.
##
# publish = FileInsertAtFirstRegexMatch(
#     "CHANGELOG.rst",
#     r'Changelog\n=.+\n\n()',
#     idx=lambda m: m.start(1)
# )
publish = stdout


## ``revs`` is a list of callables or a list of strings
##
## Callables will be called to resolve as strings and allow dynamic
## computation of these. The result will be used as revisions for
## gitchangelog (as if directly stated on the command line). This allows
## to filter exactly which commits will be read by gitchangelog.
##
## To get full documentation on the format of these strings, please
## refer to the ``git rev-list`` arguments. There are many examples.
##
## Using callables is especially useful, for instance, if you
## are using gitchangelog to generate your changelog incrementally.
##
## Some helpers are provided, you can use them::
##
##   - FileFirstRegexMatch(file, pattern): will return a callable that will
##     return the first string match for the given pattern in the given file.
##     If you use named sub-patterns in your regex pattern, it'll output only
##     the string matching the regex pattern named "rev".
##
##   - Caret(rev): will return the rev prefixed by a "^", which is a
##     way to remove the given revision and all its ancestors.
##
## Please note that if you provide a rev-list on the command line, it'll
## replace this value (which will then be ignored).
##
## If empty, then ``gitchangelog`` will act as if it had to generate a full
## changelog.
##
## The default is to use all commits to make the changelog.
#revs = ["^1.0.3", ]
#revs = [
#    Caret(
#        FileFirstRegexMatch(
#            "CHANGELOG.rst",
#            r"(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")),
#    "HEAD"
#]
revs = []

--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
version: 2
updates:
- package-ecosystem: pip
  directory: "/"
  schedule:
    interval: daily

- package-ecosystem: "github-actions"
  directory: "/"
  schedule:
    interval: "monthly"

--------------------------------------------------------------------------------
/.github/workflows/auto-merge.yml:
--------------------------------------------------------------------------------
name: Dependabot auto-merge
on: pull_request_target

permissions:
  pull-requests: write
  contents: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v2.4.0
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Enable auto-merge for Dependabot PRs
        run: gh pr merge --auto --squash "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}

--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
name: CI

on:
  push:
    branches:
      - master
      - '[0-9].[0-9]+'  # matches to backport branches, e.g. 3.6
    tags: [ 'v*' ]
  pull_request:
    branches:
      - master
      - '[0-9].[0-9]+'

jobs:
  lint:
    name: Linter
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.13
          cache: 'pip'
          cache-dependency-path: '**/requirements*.txt'
      - name: Pre-Commit hooks
        uses: pre-commit/action@v3.0.1
      - name: Install dependencies
        uses: py-actions/py-dependency-install@v4
        with:
          path: requirements-dev.txt
      - name: Install itself
        run: |
          pip install .
      - name: Run linter
        run: |
          make lint
      - name: Prepare twine checker
        run: |
          pip install -U build twine wheel
          python -m build
      - name: Run twine checker
        run: |
          twine check dist/*

  test:
    name: Test
    strategy:
      matrix:
        os: [ubuntu]
        pyver: ['3.9', '3.10', '3.11', '3.12', '3.13']
        redis: ['latest']
        ujson: ['']
        include:
          - os: ubuntu
            pyver: pypy-3.9
            redis: 'latest'
          - os: ubuntu
            pyver: '3.9'
            redis: '5.0.14'
          - os: ubuntu
            pyver: '3.9'
            redis: 'latest'
            ujson: 'ujson'
    services:
      redis:
        image: redis:${{ matrix.redis }}
        ports:
          - 6379:6379
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
      memcached:
        image: memcached
        ports:
          - 11211:11211
    runs-on: ${{ matrix.os }}-latest
    timeout-minutes: 15
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Python ${{ matrix.pyver }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.pyver }}
          allow-prereleases: true
          cache: 'pip'
          cache-dependency-path: '**/requirements*.txt'
      - name: Install ujson
        if: ${{ matrix.ujson == 'ujson' }}
        run: pip install ujson
      - name: Install dependencies
        uses: py-actions/py-dependency-install@v4
        with:
          path: requirements.txt
      - name: Run unittests
        env:
          COLOR: 'yes'
        run: pytest tests --cov-report xml --cov-report html
      - name: Run functional tests
        run: bash examples/run_all.sh
      - name: Uninstall optional backends
        run: pip uninstall -y aiomcache redis
      - name: Run unittests with minimal backend set
        env:
          COLOR: 'yes'
        run: |
          pytest --cov-report xml --cov-report html --cov-append tests/acceptance tests/ut -m "not memcached and not redis" --ignore "tests/ut/backends/test_memcached.py" --ignore "tests/ut/backends/test_redis.py"
      - name: Produce coverage report
        run: python -m coverage xml
      - name: Upload coverage
        uses: codecov/codecov-action@v5
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}

  check:  # This job does nothing and is only used for the branch protection
    if: always()

    needs: [lint, test]

    runs-on: ubuntu-latest

    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}

  deploy:
    name: Deploy
    environment: release
    if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
    needs: [check]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Update pip, wheel, setuptools, build, twine
        run: |
          python -m pip install -U pip wheel setuptools build twine
      - name: Build dists
        run: |
          python -m build
      - name: Make Release
        uses: aio-libs/create-release@v1.6.6
        with:
          changes_file: CHANGES.rst
          name: aiocache
          version_file: aiocache/__init__.py
          github_token: ${{ secrets.GITHUB_TOKEN }}
          pypi_token: ${{ secrets.PYPI_API_TOKEN }}
          dist_dir: dist
          fix_issue_regex: "`#(\\d+) <https://github.com/aio-libs/aiocache/issues/\\1>`"
          fix_issue_repl: "(#\\1)"

--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
name: "CodeQL"

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]
  schedule:
    - cron: "28 18 * * 3"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ python ]

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          queries: +security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{ matrix.language }}"

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/
docs/_build_html/

# PyBuilder
target/

# IPython Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# dotenv
.env

# virtualenv
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject

dump.rdb
_release_notes

tags
.mypy_cache/
.pytest_cache/

--------------------------------------------------------------------------------
/.mypy.ini:
--------------------------------------------------------------------------------
[mypy]
files = aiocache, examples, tests
#check_untyped_defs = True
follow_imports_for_stubs = True
#disallow_any_decorated = True
disallow_any_generics = True
disallow_incomplete_defs = True
disallow_subclassing_any = True
#disallow_untyped_calls = True
disallow_untyped_decorators = True
#disallow_untyped_defs = True
implicit_reexport = False
no_implicit_optional = True
show_error_codes = True
strict_equality = True
warn_incomplete_stub = True
warn_redundant_casts = True
warn_unreachable = True
warn_unused_ignores = True
disallow_any_unimported = True
#warn_return_any = True

[mypy-tests.*]
disallow_any_decorated = False
disallow_untyped_calls = False
disallow_untyped_defs = False


[mypy-msgpack.*]
ignore_missing_imports = True

--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
      - id: check-added-large-files
  - repo: https://github.com/PyCQA/flake8
    rev: '7.1.1'
    hooks:
      - id: flake8
        exclude: "^docs/"

--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html
# for details

---
version: 2

sphinx:
  configuration: docs/conf.py

submodules:
  include: all
  exclude: []
  recursive: true

build:
  os: ubuntu-24.04
  tools:
    python: "3.12"
  apt_packages:
    - graphviz

  jobs:
    post_create_environment:
      - pip install -r requirements-dev.txt

...

--------------------------------------------------------------------------------
/.release_notes.tpl:
--------------------------------------------------------------------------------
{{#general_title}}
# {{{title}}}


{{/general_title}}
{{#versions}}
## {{{label}}}

{{#sections}}
#### {{{label}}}

{{#commits}}
* {{{subject}}} - {{{author}}}
{{#body}}
_{{{body}}}_
{{/body}}

{{/commits}}
{{/sections}}


{{/versions}}

--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
Contributing
============

#. Clone the repository with ``git clone git@github.com:argaen/aiocache.git``
#. Install dependencies with ``make install-dev``
#. Make a change (this means writing code, tests that don't reduce coverage, and docs)
#. Ensure syntax is correct with ``make lint``. If there are errors, you can format the code with ``make format``
#. Ensure all tests pass with ``make test``. For fast iterations, use ``make unit``, which will run just the unit tests. You will need docker and docker-compose to be able to pass acceptance and functional tests.
#. Ensure documentation is OK with ``sphinx-autobuild docs/ docs/_build/html/``
#. Make the PR in Github (you must have a fork of your own)

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
Copyright (c) 2016, Manuel Miranda de Cid
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
include CHANGES.rst
include LICENSE
include README.rst
include Makefile
include requirements.txt
include requirements-dev.txt
include setup.cfg
include .coveragerc
graft aiocache
graft docs
graft examples
graft tests
global-exclude *.pyc

--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
cov-report = true


lint:
	flake8 tests/ aiocache/

install-dev:
	pip install -e .[redis,memcached,msgpack,dev]

pylint:
	pylint --disable=C0111 aiocache

unit:
	coverage run -m pytest tests/ut
	@if [ $(cov-report) = true ]; then\
		coverage combine;\
		coverage report;\
	fi

acceptance:
	pytest -sv tests/acceptance

doc:
	make -C docs/ html

functional:
	bash examples/run_all.sh

performance:
	pytest -sv tests/performance

test: lint unit acceptance functional

_release:
	scripts/make_release

release: test _release

changelog:
	gitchangelog

--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
aiocache
########

Asyncio cache supporting multiple backends (memory, redis, memcached, etc.).

.. image:: https://codecov.io/gh/aio-libs/aiocache/branch/master/graph/badge.svg
  :target: https://codecov.io/gh/aio-libs/aiocache

.. image:: https://badge.fury.io/py/aiocache.svg
  :target: https://pypi.python.org/pypi/aiocache

.. image:: https://img.shields.io/pypi/pyversions/aiocache.svg
  :target: https://pypi.python.org/pypi/aiocache

This library aims for simplicity over specialization. All caches contain the same minimum interface, which consists of the following functions:

- ``add``: Only adds key/value if key does not exist.
- ``get``: Retrieve value identified by key.
- ``set``: Sets key/value.
- ``multi_get``: Retrieves multiple key/values.
- ``multi_set``: Sets multiple key/values.
- ``exists``: Returns True if key exists, False otherwise.
- ``increment``: Increment the value stored in the given key.
- ``delete``: Deletes key and returns number of deleted items.
- ``clear``: Clears the items stored.
- ``raw``: Executes the specified command using the underlying client.


.. role:: python(code)
  :language: python

.. contents::

.. section-numbering:


Installing
==========

- ``pip install aiocache``
- ``pip install aiocache[redis]``
- ``pip install aiocache[memcached]``
- ``pip install aiocache[redis,memcached]``
- ``pip install aiocache[msgpack]``


Usage
=====

Using a cache is as simple as

.. code-block:: python

    >>> import asyncio
    >>> from aiocache import SimpleMemoryCache
    >>> cache = SimpleMemoryCache()  # Or RedisCache, MemcachedCache...
    >>> with asyncio.Runner() as runner:
    ...     runner.run(cache.set('key', 'value'))
    True
    >>> runner.run(cache.get('key'))
    'value'

Or as a decorator

.. code-block:: python

    import asyncio

    from collections import namedtuple

    import redis.asyncio as redis

    from aiocache import RedisCache, cached
    from aiocache.serializers import PickleSerializer
    # With this we can store python objects in backends like Redis!

    Result = namedtuple('Result', "content, status")
    redis_client = redis.Redis(host="127.0.0.1", port=6379)
    redis_cache = RedisCache(redis_client, namespace="main", serializer=PickleSerializer())


    @cached(redis_cache, key="key")
    async def cached_call():
        print("Sleeping for three seconds zzzz.....")
        await asyncio.sleep(3)
        return Result("content", 200)


    async def run():
        async with redis_client, redis_cache:
            await cached_call()
            await cached_call()
            await cached_call()
            await redis_cache.delete("key")

    if __name__ == "__main__":
        asyncio.run(run())


How does it work
================

Aiocache provides 3 main entities:

- **backends**: Allow you to specify which backend you want to use for your cache. See the docs for a full list of supported backends.
- **serializers**: Serialize and deserialize the data between your code and the backends. This allows you to save any Python object into your cache. Currently supporting: StringSerializer, PickleSerializer, JsonSerializer, and MsgPackSerializer. But you can also build custom ones.
- **plugins**: Implement a hooks system that allows executing extra behavior before and after each command.

If you are missing a backend, serializer or plugin implementation that you think would be interesting for the package, do not hesitate to open a new issue.

.. image:: docs/images/architecture.png
  :align: center

Those 3 entities combine during some of the cache operations to apply the desired command (backend), data transformation (serializer) and pre/post hooks (plugins). To get a better picture of what happens, here you can check how the ``set`` function works in ``aiocache``:

.. image:: docs/images/set_operation_flow.png
  :align: center


Amazing examples
================

In the `examples folder <https://github.com/aio-libs/aiocache/tree/master/examples>`_ you can check different use cases:

- `Sanic, Aiohttp and Tornado <https://github.com/aio-libs/aiocache/tree/master/examples/frameworks>`_
- `Python object in Redis <https://github.com/aio-libs/aiocache/blob/master/examples/python_object.py>`_
- `Custom serializer for compressing data <https://github.com/aio-libs/aiocache/blob/master/examples/serializer_class.py>`_ (a minimal sketch follows below)
- `TimingPlugin and HitMissRatioPlugin demos <https://github.com/aio-libs/aiocache/blob/master/examples/plugins.py>`_
- `Using marshmallow as a serializer <https://github.com/aio-libs/aiocache/blob/master/examples/marshmallow_serializer_class.py>`_
- `Using cached decorator <https://github.com/aio-libs/aiocache/blob/master/examples/cached_decorator.py>`_.
- `Using multi_cached decorator <https://github.com/aio-libs/aiocache/blob/master/examples/multicached_decorator.py>`_.
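
As a taste of what a custom serializer looks like, here is a minimal sketch in the spirit of the compression example linked above (the class name and the choice of ``zlib`` are illustrative, not part of the library):

.. code-block:: python

    import zlib

    from aiocache import SimpleMemoryCache
    from aiocache.serializers import BaseSerializer


    class CompressionSerializer(BaseSerializer):
        # Values are stored as raw bytes, so no string encoding is applied.
        DEFAULT_ENCODING = None

        def dumps(self, value):
            # Compress on the way into the cache...
            return zlib.compress(value.encode())

        def loads(self, value):
            # ...and decompress on the way out.
            if value is None:
                return None
            return zlib.decompress(value).decode()


    cache = SimpleMemoryCache(serializer=CompressionSerializer())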


Documentation
=============

- `Usage <http://aiocache.readthedocs.io/en/latest>`_
- `Caches <http://aiocache.readthedocs.io/en/latest/caches.html>`_
- `Serializers <http://aiocache.readthedocs.io/en/latest/serializers.html>`_
- `Plugins <http://aiocache.readthedocs.io/en/latest/plugins.html>`_
- `Configuration <http://aiocache.readthedocs.io/en/latest/configuration.html>`_
- `Decorators <http://aiocache.readthedocs.io/en/latest/decorators.html>`_
- `Testing <http://aiocache.readthedocs.io/en/latest/testing.html>`_
- `Examples <https://github.com/aio-libs/aiocache/tree/master/examples>`_

--------------------------------------------------------------------------------
/aiocache/__init__.py:
--------------------------------------------------------------------------------
import logging
from typing import Any, Type

from .backends.memory import SimpleMemoryCache
from .base import BaseCache

__version__ = "1.0.0a0"

logger = logging.getLogger(__name__)

_AIOCACHE_CACHES: list[Type[BaseCache[Any]]] = [SimpleMemoryCache]

try:
    import redis
except ImportError:
    logger.debug("redis not installed, RedisCache unavailable")
else:
    from aiocache.backends.redis import RedisCache

    _AIOCACHE_CACHES.append(RedisCache)
    del redis

try:
    import aiomcache
except ImportError:
    logger.debug("aiomcache not installed, Memcached unavailable")
else:
    from aiocache.backends.memcached import MemcachedCache

    _AIOCACHE_CACHES.append(MemcachedCache)
    del aiomcache

from .decorators import cached, cached_stampede, multi_cached  # noqa: E402,I202

__all__ = (
    "cached",
    "cached_stampede",
    "multi_cached",
    *sorted(c.__name__ for c in _AIOCACHE_CACHES),
)

--------------------------------------------------------------------------------
/aiocache/backends/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/aiocache/backends/__init__.py

--------------------------------------------------------------------------------
/aiocache/backends/memcached.py:
--------------------------------------------------------------------------------
import asyncio
from typing import Optional

import aiomcache

from aiocache.base import BaseCache
from aiocache.serializers import JsonSerializer


class MemcachedBackend(BaseCache[bytes]):
    def __init__(self, host="127.0.0.1", port=11211, pool_size=2, **kwargs):
        super().__init__(**kwargs)
        self.host = host
        self.port = port
        self.pool_size = int(pool_size)
        self.client = aiomcache.Client(
            self.host, self.port, pool_size=self.pool_size
        )

    async def _get(self, key, encoding="utf-8", _conn=None):
        value = await self.client.get(key)
        if encoding is None or value is None:
            return value
        return value.decode(encoding)

    async def _gets(self, key, encoding="utf-8", _conn=None):
        key = key.encode() if isinstance(key, str) else key
        _, token = await self.client.gets(key)
        return token

    async def _multi_get(self, keys, encoding="utf-8", _conn=None):
        values = []
        for value in await self.client.multi_get(*keys):
            if encoding is None or value is None:
                values.append(value)
            else:
                values.append(value.decode(encoding))
        return values

    async def _set(self, key, value, ttl=0, _cas_token=None, _conn=None):
        value = value.encode() if isinstance(value, str) else value
        if _cas_token is not None:
            return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn)
        try:
            return await self.client.set(key, value, exptime=ttl or 0)
        except aiomcache.exceptions.ValidationException as e:
            raise TypeError("aiomcache error: {}".format(str(e)))

    async def _cas(self, key, value, token, ttl=None, _conn=None):
        return await self.client.cas(key, value, token, exptime=ttl or 0)

    async def _multi_set(self, pairs, ttl=0, _conn=None):
        tasks = []
        for key, value in pairs:
            value = str.encode(value) if isinstance(value, str) else value
            tasks.append(self.client.set(key, value, exptime=ttl or 0))

        try:
            await asyncio.gather(*tasks)
        except aiomcache.exceptions.ValidationException as e:
            raise TypeError("aiomcache error: {}".format(str(e)))

        return True

    async def _add(self, key, value, ttl=0, _conn=None):
        value = str.encode(value) if isinstance(value, str) else value
        try:
            ret = await self.client.add(key, value, exptime=ttl or 0)
        except aiomcache.exceptions.ValidationException as e:
            raise TypeError("aiomcache error: {}".format(str(e)))
        if not ret:
            raise ValueError("Key {} already exists, use .set to update the value".format(key))

        return True

    async def _exists(self, key, _conn=None):
        return await self.client.append(key, b"")

    async def _increment(self, key, delta, _conn=None):
        incremented = None
        try:
            if delta > 0:
                incremented = await self.client.incr(key, delta)
            else:
                incremented = await self.client.decr(key, abs(delta))
        except aiomcache.exceptions.ClientException as e:
            if "NOT_FOUND" in str(e):
                await self._set(key, str(delta).encode())
            else:
                raise TypeError("aiomcache error: {}".format(str(e)))

        return incremented or delta

    async def _expire(self, key, ttl, _conn=None):
        return await self.client.touch(key, ttl)

    async def _delete(self, key, _conn=None):
        return 1 if await self.client.delete(key) else 0

    async def _clear(self, namespace=None, _conn=None):
        if namespace:
            raise ValueError("MemcachedBackend doesn't support flushing by namespace")
        else:
            await self.client.flush_all()
        return True

    async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
        value = await getattr(self.client, command)(*args, **kwargs)
        if command in {"get", "multi_get"}:
            if encoding is not None and value is not None:
                return value.decode(encoding)
        return value

    async def _redlock_release(self, key, _):
        # Not ideal, should check the value coincides first but this would introduce
        # race conditions
        return await self._delete(key)

    async def _close(self, *args, _conn=None, **kwargs):
        await self.client.close()

    def build_key(self, key: str, namespace: Optional[str] = None) -> bytes:
        ns_key = self._str_build_key(key, namespace).replace(" ", "_")
        return str.encode(ns_key)


class MemcachedCache(MemcachedBackend):
    """
    Memcached cache implementation with the following components as defaults:
      - serializer: :class:`aiocache.serializers.JsonSerializer`
      - plugins: []

    Config options are:

    :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
    :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
    :param namespace: string to use as default prefix for the key used in all operations of
        the backend. Default is an empty string, "".
    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
        By default it's 5.
    :param host: str with the host to connect to. Default is 127.0.0.1.
    :param port: int with the port to connect to. Default is 11211.
    :param pool_size: int size for memcached connections pool. Default is 2.
    """

    NAME = "memcached"

    def __init__(self, serializer=None, **kwargs):
        super().__init__(serializer=serializer or JsonSerializer(), **kwargs)

    @classmethod
    def parse_uri_path(cls, path):
        return {}

    def __repr__(self):  # pragma: no cover
        return "MemcachedCache ({}:{})".format(self.host, self.port)

--------------------------------------------------------------------------------
/aiocache/backends/memory.py:
--------------------------------------------------------------------------------
import asyncio
from typing import Any, Dict, Optional

from aiocache.base import BaseCache
from aiocache.serializers import NullSerializer


class SimpleMemoryBackend(BaseCache[str]):
    """
    Wrapper around dict operations to use it as a cache backend
    """

    # TODO(PY312): https://peps.python.org/pep-0692/
    def __init__(self, **kwargs: Any):
        super().__init__(**kwargs)

        self._cache: Dict[str, object] = {}
        self._handlers: Dict[str, asyncio.TimerHandle] = {}

    async def _get(self, key, encoding="utf-8", _conn=None):
        return self._cache.get(key)

    async def _gets(self, key, encoding="utf-8", _conn=None):
        return await self._get(key, encoding=encoding, _conn=_conn)

    async def _multi_get(self, keys, encoding="utf-8", _conn=None):
        return [self._cache.get(key) for key in keys]

    async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None):
        if _cas_token is not None and _cas_token != self._cache.get(key):
            return 0

        if key in self._handlers:
            self._handlers[key].cancel()

        self._cache[key] = value
        if ttl:
            loop = asyncio.get_running_loop()
            self._handlers[key] = loop.call_later(ttl, self.__delete, key)
        return True

    async def _multi_set(self, pairs, ttl=None, _conn=None):
        for key, value in pairs:
            await self._set(key, value, ttl=ttl)
        return True

    async def _add(self, key, value, ttl=None, _conn=None):
        if key in self._cache:
            raise ValueError("Key {} already exists, use .set to update the value".format(key))

        await self._set(key, value, ttl=ttl)
        return True

    async def _exists(self, key, _conn=None):
        return key in self._cache

    async def _increment(self, key, delta, _conn=None):
        if key not in self._cache:
            self._cache[key] = delta
        else:
            try:
                self._cache[key] = int(self._cache[key]) + delta
            except ValueError:
                raise TypeError("Value is not an integer") from None
        return self._cache[key]

    async def _expire(self, key, ttl, _conn=None):
        if key in self._cache:
            handle = self._handlers.pop(key, None)
            if handle:
                handle.cancel()
            if ttl:
                loop = asyncio.get_running_loop()
                self._handlers[key] = loop.call_later(ttl, self.__delete, key)
            return True

        return False

    async def _delete(self, key, _conn=None):
        return self.__delete(key)

    async def _clear(self, namespace=None, _conn=None):
        if namespace:
            for key in list(self._cache):
                if key.startswith(namespace):
                    self.__delete(key)
        else:
            self._cache = {}
            self._handlers = {}
        return True

    async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
        return getattr(self._cache, command)(*args, **kwargs)

    async def _redlock_release(self, key, value):
        if self._cache.get(key) == value:
            return self.__delete(key)
        return 0

    def __delete(self, key):
        if self._cache.pop(key, None) is not None:
            handle = self._handlers.pop(key, None)
            if handle:
                handle.cancel()
            return 1

        return 0

    def build_key(self, key: str, namespace: Optional[str] = None) -> str:
        return self._str_build_key(key, namespace)


class SimpleMemoryCache(SimpleMemoryBackend):
    """
    Memory cache implementation with the following components as defaults:
      - serializer: :class:`aiocache.serializers.NullSerializer`
      - plugins: None

    Config options are:

    :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
    :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
    :param namespace: string to use as default prefix for the key used in all operations of
        the backend. Default is an empty string, "".
    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
        By default it's 5.
    """

    NAME = "memory"

    def __init__(self, serializer=None, **kwargs):
        super().__init__(serializer=serializer or NullSerializer(), **kwargs)

    @classmethod
    def parse_uri_path(cls, path):
        return {}

--------------------------------------------------------------------------------
/aiocache/backends/redis.py:
--------------------------------------------------------------------------------
import itertools
from typing import Any, Callable, Optional, TYPE_CHECKING

import redis.asyncio as redis
from redis.exceptions import ResponseError as IncrbyException

from aiocache.base import BaseCache
from aiocache.serializers import JsonSerializer

if TYPE_CHECKING:  # pragma: no cover
    from aiocache.serializers import BaseSerializer


class RedisBackend(BaseCache[str]):
    RELEASE_SCRIPT = (
        "if redis.call('get',KEYS[1]) == ARGV[1] then"
        " return redis.call('del',KEYS[1])"
        " else"
        " return 0"
        " end"
    )

    CAS_SCRIPT = (
        "if redis.call('get',KEYS[1]) == ARGV[2] then"
        " if #ARGV == 4 then"
        " return redis.call('set', KEYS[1], ARGV[1], ARGV[3], ARGV[4])"
        " else"
        " return redis.call('set', KEYS[1], ARGV[1])"
        " end"
        " else"
        " return 0"
        " end"
    )

    def __init__(
        self,
        client: redis.Redis,
        **kwargs,
    ):
        super().__init__(**kwargs)

        # NOTE: decoding can't be controlled on API level after switching to
        # redis, we need to disable decoding on global/connection level
        # (decode_responses=False), because some of the values are saved as
        # bytes directly, like pickle serialized values, which may raise an
        # exception when decoded with 'utf-8'.
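        # A compatible client would be constructed along these lines (host and
        # port are illustrative; decode_responses=False is redis-py's default):
        #     client = redis.Redis(host="127.0.0.1", port=6379, decode_responses=False)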
        if client.connection_pool.connection_kwargs['decode_responses']:
            raise ValueError("redis client must be constructed with decode_responses set to False")
        self.client = client

    async def _get(self, key, encoding="utf-8", _conn=None):
        value = await self.client.get(key)
        if encoding is None or value is None:
            return value
        return value.decode(encoding)

    async def _gets(self, key, encoding="utf-8", _conn=None):
        return await self._get(key, encoding=encoding, _conn=_conn)

    async def _multi_get(self, keys, encoding="utf-8", _conn=None):
        values = await self.client.mget(*keys)
        if encoding is None:
            return values
        return [v if v is None else v.decode(encoding) for v in values]

    async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None):
        if _cas_token is not None:
            return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn)
        if ttl is None:
            return await self.client.set(key, value)
        if isinstance(ttl, float):
            ttl = int(ttl * 1000)
            return await self.client.psetex(key, ttl, value)
        return await self.client.setex(key, ttl, value)

    async def _cas(self, key, value, token, ttl=None, _conn=None):
        args = ()
        if ttl is not None:
            args = ("PX", int(ttl * 1000)) if isinstance(ttl, float) else ("EX", ttl)
        return await self._raw("eval", self.CAS_SCRIPT, 1, key, value, token, *args, _conn=_conn)

    async def _multi_set(self, pairs, ttl=None, _conn=None):
        ttl = ttl or 0

        flattened = list(itertools.chain.from_iterable((key, value) for key, value in pairs))

        if ttl:
            await self.__multi_set_ttl(flattened, ttl)
        else:
            await self.client.execute_command("MSET", *flattened)

        return True

    async def __multi_set_ttl(self, flattened, ttl):
        async with self.client.pipeline(transaction=True) as p:
            p.execute_command("MSET", *flattened)
            ttl, exp = (int(ttl * 1000), p.pexpire) if isinstance(ttl, float) else (ttl, p.expire)
            for key in flattened[::2]:
                exp(key, time=ttl)
            await p.execute()

    async def _add(self, key, value, ttl=None, _conn=None):
        kwargs = {"nx": True}
        if isinstance(ttl, float):
            kwargs["px"] = int(ttl * 1000)
        else:
            kwargs["ex"] = ttl
        was_set = await self.client.set(key, value, **kwargs)
        if not was_set:
            raise ValueError("Key {} already exists, use .set to update the value".format(key))
        return was_set

    async def _exists(self, key, _conn=None):
        number = await self.client.exists(key)
        return bool(number)

    async def _increment(self, key, delta, _conn=None):
        try:
            return await self.client.incrby(key, delta)
        except IncrbyException:
            raise TypeError("Value is not an integer") from None

    async def _expire(self, key, ttl, _conn=None):
        if ttl == 0:
            return await self.client.persist(key)
        return await self.client.expire(key, ttl)

    async def _delete(self, key, _conn=None):
        return await self.client.delete(key)

    async def _clear(self, namespace=None, _conn=None):
        if namespace:
            keys = await self.client.keys("{}:*".format(namespace))
            if keys:
                await self.client.delete(*keys)
        else:
            await self.client.flushdb()
        return True

    async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
        value = await getattr(self.client, command)(*args, **kwargs)
        if encoding is not None:
            if command == "get" and value is not None:
                value = value.decode(encoding)
            elif command in {"keys", "mget"}:
                value = [v if v is None else v.decode(encoding) for v in value]
        return value

    async def _redlock_release(self, key, value):
        return await self._raw("eval", self.RELEASE_SCRIPT, 1, key, value)

    def build_key(self, key: str, namespace: Optional[str] = None) -> str:
        return self._str_build_key(key, namespace)


class RedisCache(RedisBackend):
    """
    Redis cache implementation with the following components as defaults:
      - serializer: :class:`aiocache.serializers.JsonSerializer`
      - plugins: []

    Config options are:

    :param serializer: obj derived from :class:`aiocache.serializers.BaseSerializer`.
    :param plugins: list of :class:`aiocache.plugins.BasePlugin` derived classes.
    :param namespace: string to use as default prefix for the key used in all operations of
        the backend. Default is an empty string, "".
    :param timeout: int or float in seconds specifying maximum timeout for the operations to last.
        By default it's 5.
    :param client: redis.Redis which is an active client for working with redis
    """

    NAME = "redis"

    def __init__(
        self,
        client: redis.Redis,
        serializer: Optional["BaseSerializer"] = None,
        namespace: str = "",
        key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k,
        **kwargs: Any,
    ):
        super().__init__(
            client=client,
            serializer=serializer or JsonSerializer(),
            namespace=namespace,
            key_builder=key_builder,
            **kwargs,
        )

    @classmethod
    def parse_uri_path(cls, path):
        """
        Given a uri path, return the Redis specific configuration
        options in that path string according to iana definition
        http://www.iana.org/assignments/uri-schemes/prov/redis

        :param path: string containing the path. Example: "/0"
        :return: mapping containing the options. Example: {"db": "0"}
        """
        options = {}
        db, *_ = path[1:].split("/")
        if db:
            options["db"] = db
        return options

    def __repr__(self):  # pragma: no cover
        connection_kwargs = self.client.connection_pool.connection_kwargs
        return "RedisCache ({}:{})".format(connection_kwargs['host'], connection_kwargs['port'])

--------------------------------------------------------------------------------
/aiocache/exceptions.py:
--------------------------------------------------------------------------------
class InvalidCacheType(Exception):
    pass

--------------------------------------------------------------------------------
/aiocache/lock.py:
--------------------------------------------------------------------------------
import asyncio
import uuid
from typing import Any, Dict, Generic, Union

from aiocache.base import BaseCache, CacheKeyType


class RedLock(Generic[CacheKeyType]):
    """
    Implementation of `Redlock <https://redis.io/topics/distlock>`_
    with a single instance because aiocache is focused on single
    instance cache.

    This locking has some limitations and shouldn't be used in
    situations where consistency is critical. These locks are aimed at
    performance use cases, where failing on locking from time to time
    is acceptable. TLDR: do NOT use this if you need real resource
    exclusion.

    A couple of considerations with the implementation:

    - If the lease expires and there are calls waiting, all of them
      will pass (blocking just happens for the first time).
    - When a new call arrives, it will always wait at most the lease
      time. This means that the call could end up blocked longer
      than needed in case the lease from the blocker expires.

    Backend specific implementation:

    - Redis implements the redlock algorithm correctly. It sets
      the key if it doesn't exist. To release, it checks the value
      is the same as the instance trying to release and, if it is,
      it removes the lock. If not, it does nothing.
    - Memcached follows the same approach with one difference. Due
      to memcached lacking a way to execute the get and delete
      commands atomically, any client is able to release the
      lock. This is a limitation that can't be fixed without introducing
      race conditions.
    - The memory implementation is not distributed; it will only apply
      to the process running. Say you have 4 processes running
      APIs with aiocache, the locking will apply only per process
      (still useful to reduce load per process).

    Example usage::

        from aiocache import SimpleMemoryCache
        from aiocache.lock import RedLock

        cache = SimpleMemoryCache()
        async with RedLock(cache, 'key', lease=1):  # Calls will wait here
            result = await cache.get('key')
            if result is not None:
                return result
            result = await super_expensive_function()
            await cache.set('key', result)

    In the example, the first call will start computing ``super_expensive_function``
    while consecutive calls will block at most 1 second. If the blocking lasts for
    more than 1 second, the calls will proceed to also calculate the
    result of ``super_expensive_function``.
    """

    _EVENTS: Dict[str, asyncio.Event] = {}

    def __init__(self, client: BaseCache[CacheKeyType], key: str, lease: Union[int, float]):
        self.client = client
        self.key = self.client.build_key(key + "-lock")
        self.lease = lease
        self._value = ""

    async def __aenter__(self):
        return await self._acquire()

    async def _acquire(self):
        self._value = str(uuid.uuid4())
        try:
            await self.client._add(self.key, self._value, ttl=self.lease)
            RedLock._EVENTS[self.key] = asyncio.Event()
        except ValueError:
            await self._wait_for_release()

    async def _wait_for_release(self):
        try:
            await asyncio.wait_for(RedLock._EVENTS[self.key].wait(), self.lease)
        except asyncio.TimeoutError:
            pass
        except KeyError:  # lock was released when wait_for was rescheduled
            pass

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self._release()

    async def _release(self):
        removed = await self.client._redlock_release(self.key, self._value)
        if removed:
            RedLock._EVENTS.pop(self.key).set()


class OptimisticLock(Generic[CacheKeyType]):
    """
    Implementation of
    `optimistic lock <https://en.wikipedia.org/wiki/Optimistic_concurrency_control>`_

    Optimistic locking assumes multiple transactions can happen at the same time
    and they will only fail if, before finishing, conflicting modifications with other
    transactions are found, producing a roll back.

    Finding a conflict will end up raising an :class:`aiocache.lock.OptimisticLockError`
    exception. A conflict happens when the value at the storage is different from
A conflict happens when the value in the storage is different from 110 | the one we retrieved when the lock started. 111 | 112 | Example usage:: 113 | 114 | cache = Cache(Cache.REDIS) 115 | 116 | # The value stored in 'key' will be checked here 117 | async with OptimisticLock(cache, 'key') as lock: 118 | result = await super_expensive_call() 119 | await lock.cas(result) 120 | 121 | If any other call sets the value of ``key`` before ``lock.cas`` is called, 122 | an :class:`aiocache.lock.OptimisticLockError` will be raised. A way to make 123 | the same call crash would be to change the value inside the lock like:: 124 | 125 | cache = Cache(Cache.REDIS) 126 | 127 | # The value stored in 'key' will be checked here 128 | async with OptimisticLock(cache, 'key') as lock: 129 | result = await super_expensive_call() 130 | await cache.set('key', 'random_value') # This will make the `lock.cas` call fail 131 | await lock.cas(result) 132 | 133 | If the lock is created with a key that doesn't exist, there will never be conflicts. 134 | """ 135 | 136 | def __init__(self, client: BaseCache[CacheKeyType], key: str): 137 | self.client = client 138 | self.key = key 139 | self.ns_key = self.client.build_key(key) 140 | self._token = None 141 | 142 | async def __aenter__(self): 143 | return await self._acquire() 144 | 145 | async def _acquire(self): 146 | self._token = await self.client._gets(self.ns_key) 147 | return self 148 | 149 | async def __aexit__(self, exc_type, exc_value, traceback): 150 | pass 151 | 152 | async def cas(self, value: Any, **kwargs: Any) -> bool: 153 | """ 154 | Checks and sets the specified value for the locked key. If the value has changed 155 | since the lock was created, it will raise an :class:`aiocache.lock.OptimisticLockError` 156 | exception. 157 | 158 | :raises: :class:`aiocache.lock.OptimisticLockError` 159 | """ 160 | success = await self.client.set(self.key, value, _cas_token=self._token, **kwargs) 161 | if not success: 162 | raise OptimisticLockError("Value has changed since the lock started") 163 | return True 164 | 165 | 166 | class OptimisticLockError(Exception): 167 | """ 168 | Raised when a conflict is found during an optimistic lock 169 | """ 170 | -------------------------------------------------------------------------------- /aiocache/plugins.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module implements different plugins you can attach to your cache instance. They 3 | are coded in a collaborative way, so you can use multiple inheritance. 4 | """ 5 | 6 | from aiocache.base import API 7 | 8 | 9 | class BasePlugin: 10 | @classmethod 11 | def add_hook(cls, func, hooks): 12 | for hook in hooks: 13 | setattr(cls, hook, func) 14 | 15 | async def do_nothing(self, *args, **kwargs): 16 | pass 17 | 18 | 19 | BasePlugin.add_hook( 20 | BasePlugin.do_nothing, ["pre_{}".format(method.__name__) for method in API.CMDS] 21 | ) 22 | BasePlugin.add_hook( 23 | BasePlugin.do_nothing, ["post_{}".format(method.__name__) for method in API.CMDS] 24 | ) 25 | 26 | 27 | class TimingPlugin(BasePlugin): 28 | """ 29 | Calculates average, min and max times each command takes. The data is saved 30 | in the cache class as a dict attribute called ``profiling``.
For example, to 31 | access the average time of the operation get, you can do ``cache.profiling['get_avg']`` 32 | """ 33 | 34 | @classmethod 35 | def save_time(cls, method): 36 | async def do_save_time(self, client, *args, took=0, **kwargs): 37 | if not hasattr(client, "profiling"): 38 | client.profiling = {} 39 | 40 | previous_total = client.profiling.get("{}_total".format(method), 0) 41 | previous_avg = client.profiling.get("{}_avg".format(method), 0) 42 | previous_max = client.profiling.get("{}_max".format(method), 0) 43 | previous_min = client.profiling.get("{}_min".format(method)) 44 | 45 | client.profiling["{}_total".format(method)] = previous_total + 1 46 | client.profiling["{}_avg".format(method)] = previous_avg + (took - previous_avg) / ( 47 | previous_total + 1 48 | ) 49 | client.profiling["{}_max".format(method)] = max(took, previous_max) 50 | client.profiling["{}_min".format(method)] = ( 51 | min(took, previous_min) if previous_min else took 52 | ) 53 | 54 | return do_save_time 55 | 56 | 57 | for method in API.CMDS: 58 | TimingPlugin.add_hook( 59 | TimingPlugin.save_time(method.__name__), ["post_{}".format(method.__name__)] 60 | ) 61 | 62 | 63 | class HitMissRatioPlugin(BasePlugin): 64 | """ 65 | Calculates the ratio of hits the cache has. The data is saved in the cache class as a dict 66 | attribute called ``hit_miss_ratio``. For example, to access the hit ratio of the cache, 67 | you can do ``cache.hit_miss_ratio['hit_ratio']``. It also provides the "total" and "hits" 68 | keys. 69 | """ 70 | 71 | async def post_get(self, client, key, took=0, ret=None, **kwargs): 72 | if not hasattr(client, "hit_miss_ratio"): 73 | client.hit_miss_ratio = {} 74 | client.hit_miss_ratio["total"] = 0 75 | client.hit_miss_ratio["hits"] = 0 76 | 77 | client.hit_miss_ratio["total"] += 1 78 | if ret is not None: 79 | client.hit_miss_ratio["hits"] += 1 80 | 81 | client.hit_miss_ratio["hit_ratio"] = ( 82 | client.hit_miss_ratio["hits"] / client.hit_miss_ratio["total"] 83 | ) 84 | 85 | async def post_multi_get(self, client, keys, took=0, ret=None, **kwargs): 86 | if not hasattr(client, "hit_miss_ratio"): 87 | client.hit_miss_ratio = {} 88 | client.hit_miss_ratio["total"] = 0 89 | client.hit_miss_ratio["hits"] = 0 90 | 91 | client.hit_miss_ratio["total"] += len(keys) 92 | for result in ret: 93 | if result is not None: 94 | client.hit_miss_ratio["hits"] += 1 95 | 96 | client.hit_miss_ratio["hit_ratio"] = ( 97 | client.hit_miss_ratio["hits"] / client.hit_miss_ratio["total"] 98 | ) 99 | -------------------------------------------------------------------------------- /aiocache/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/aiocache/py.typed -------------------------------------------------------------------------------- /aiocache/serializers/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from .serializers import ( 4 | BaseSerializer, 5 | JsonSerializer, 6 | NullSerializer, 7 | PickleSerializer, 8 | StringSerializer, 9 | ) 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | try: 15 | import msgpack 16 | except ImportError: 17 | logger.debug("msgpack not installed, MsgPackSerializer unavailable") 18 | else: 19 | from .serializers import MsgPackSerializer 20 | 21 | del msgpack 22 | 23 | 24 | __all__ = [ 25 | "BaseSerializer", 26 | "NullSerializer", 27 | "StringSerializer", 28 | 
"PickleSerializer", 29 | "JsonSerializer", 30 | "MsgPackSerializer", 31 | ] 32 | -------------------------------------------------------------------------------- /aiocache/serializers/serializers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pickle # noqa: S403 3 | from abc import ABC, abstractmethod 4 | from typing import Any, Optional 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | try: 9 | import ujson as json # noqa: I900 10 | except ImportError: 11 | logger.debug("ujson module not found, using json") 12 | import json # type: ignore[no-redef] 13 | 14 | try: 15 | import msgpack 16 | except ImportError: 17 | msgpack = None 18 | logger.debug("msgpack not installed, MsgPackSerializer unavailable") 19 | 20 | 21 | _NOT_SET = object() 22 | 23 | 24 | class BaseSerializer(ABC): 25 | 26 | DEFAULT_ENCODING: Optional[str] = "utf-8" 27 | 28 | def __init__(self, *args, encoding=_NOT_SET, **kwargs): 29 | self.encoding = self.DEFAULT_ENCODING if encoding is _NOT_SET else encoding 30 | super().__init__(*args, **kwargs) 31 | 32 | @abstractmethod 33 | def dumps(self, value: Any, /) -> Any: 34 | """Serialise the value to be stored in the backend.""" 35 | 36 | @abstractmethod 37 | def loads(self, value: Any, /) -> Any: 38 | """Decode the value retrieved from the backend.""" 39 | 40 | 41 | class NullSerializer(BaseSerializer): 42 | """ 43 | This serializer does nothing. Its only recommended to be used by 44 | :class:`aiocache.SimpleMemoryCache` because for other backends it will 45 | produce incompatible data unless you work only with str types because it 46 | store data as is. 47 | 48 | DISCLAIMER: Be careful with mutable types and memory storage. The following 49 | behavior is considered normal (same as ``functools.lru_cache``):: 50 | 51 | cache = Cache() 52 | my_list = [1] 53 | await cache.set("key", my_list) 54 | my_list.append(2) 55 | await cache.get("key") # Will return [1, 2] 56 | """ 57 | 58 | def dumps(self, value): 59 | """ 60 | Returns the same value 61 | """ 62 | return value 63 | 64 | def loads(self, value): 65 | """ 66 | Returns the same value 67 | """ 68 | return value 69 | 70 | 71 | class StringSerializer(BaseSerializer): 72 | """ 73 | Converts all input values to str. All return values are also str. Be 74 | careful because this means that if you store an ``int(1)``, you will get 75 | back '1'. 76 | 77 | The transformation is done by just casting to str in the ``dumps`` method. 78 | 79 | If you want to keep python types, use ``PickleSerializer``. ``JsonSerializer`` 80 | may also be useful to keep type of simple python types. 81 | """ 82 | 83 | def dumps(self, value): 84 | """ 85 | Serialize the received value casting it to str. 86 | 87 | :param value: obj Anything support cast to str 88 | :returns: str 89 | """ 90 | return str(value) 91 | 92 | def loads(self, value): 93 | """ 94 | Returns value back without transformations 95 | """ 96 | return value 97 | 98 | 99 | class PickleSerializer(BaseSerializer): 100 | """ 101 | Transform data to bytes using pickle.dumps and pickle.loads to retrieve it back. 102 | """ 103 | 104 | DEFAULT_ENCODING = None 105 | 106 | def __init__(self, *args, protocol=pickle.DEFAULT_PROTOCOL, **kwargs): 107 | super().__init__(*args, **kwargs) 108 | self.protocol = protocol 109 | 110 | def dumps(self, value): 111 | """ 112 | Serialize the received value using ``pickle.dumps``. 
113 | 114 | :param value: obj 115 | :returns: bytes 116 | """ 117 | return pickle.dumps(value, protocol=self.protocol) 118 | 119 | def loads(self, value): 120 | """ 121 | Deserialize value using ``pickle.loads``. 122 | 123 | :param value: bytes 124 | :returns: obj 125 | """ 126 | if value is None: 127 | return None 128 | return pickle.loads(value) # noqa: S301 129 | 130 | 131 | class JsonSerializer(BaseSerializer): 132 | """ 133 | Transforms data to a json string using json.dumps and json.loads to retrieve it back. Check 134 | https://docs.python.org/3/library/json.html#py-to-json-table for how types are converted. 135 | 136 | ujson will be used by default if available. Be careful with differences between the built-in 137 | json module and ujson: 138 | - ujson dumps supports bytes while json doesn't 139 | - ujson and json outputs may differ sometimes 140 | """ 141 | 142 | def dumps(self, value): 143 | """ 144 | Serialize the received value using ``json.dumps``. 145 | 146 | :param value: dict 147 | :returns: str 148 | """ 149 | return json.dumps(value) 150 | 151 | def loads(self, value): 152 | """ 153 | Deserialize value using ``json.loads``. 154 | 155 | :param value: str 156 | :returns: output of ``json.loads``. 157 | """ 158 | if value is None: 159 | return None 160 | return json.loads(value) 161 | 162 | 163 | class MsgPackSerializer(BaseSerializer): 164 | """ 165 | Transforms data to bytes using msgpack.dumps and msgpack.loads to retrieve it back. You need 166 | to have ``msgpack`` installed in order to be able to use this serializer. 167 | 168 | :param encoding: str. Can be used to change the encoding param for the ``msgpack.loads`` method. 169 | Default is utf-8. 170 | :param use_list: bool. Can be used to change the use_list param for the ``msgpack.loads`` method. 171 | Default is True. 172 | """ 173 | 174 | def __init__(self, *args, use_list=True, **kwargs): 175 | if not msgpack: 176 | raise RuntimeError("msgpack not installed, MsgPackSerializer unavailable") 177 | self.use_list = use_list 178 | super().__init__(*args, **kwargs) 179 | 180 | def dumps(self, value): 181 | """ 182 | Serialize the received value using ``msgpack.dumps``. 183 | 184 | :param value: obj 185 | :returns: bytes 186 | """ 187 | return msgpack.dumps(value) 188 | 189 | def loads(self, value): 190 | """ 191 | Deserialize value using ``msgpack.loads``. 192 | 193 | :param value: bytes 194 | :returns: obj 195 | """ 196 | raw = self.encoding != "utf-8" # msgpack decodes bytes to str only when raw is False 197 | if value is None: 198 | return None 199 | return msgpack.loads(value, raw=raw, use_list=self.use_list) 200 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | redis: 4 | image: redis 5 | ports: 6 | - "6379:6379" 7 | memcached: 8 | image: memcached 9 | ports: 10 | - "11211:11211" 11 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 16 | 17 | .PHONY: help 18 | help: 19 | @echo "Please use \`make ' where is one of" 20 | @echo " html to make standalone HTML files" 21 | @echo " dirhtml to make HTML files named index.html in directories" 22 | @echo " singlehtml to make a single large HTML file" 23 | @echo " pickle to make pickle files" 24 | @echo " json to make JSON files" 25 | @echo " htmlhelp to make HTML files and a HTML help project" 26 | @echo " qthelp to make HTML files and a qthelp project" 27 | @echo " applehelp to make an Apple Help Book" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " epub3 to make an epub3" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 34 | @echo " text to make text files" 35 | @echo " man to make manual pages" 36 | @echo " texinfo to make Texinfo files" 37 | @echo " info to make Texinfo files and run them through makeinfo" 38 | @echo " gettext to make PO message catalogs" 39 | @echo " changes to make an overview of all changed/added/deprecated items" 40 | @echo " xml to make Docutils-native XML files" 41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 42 | @echo " linkcheck to check all external links for integrity" 43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 44 | @echo " coverage to run coverage check of the documentation (if enabled)" 45 | @echo " dummy to check syntax errors of document sources" 46 | 47 | .PHONY: clean 48 | clean: 49 | rm -rf $(BUILDDIR)/* 50 | 51 | .PHONY: html 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | .PHONY: dirhtml 58 | dirhtml: 59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 60 | @echo 61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 62 | 63 | .PHONY: singlehtml 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | .PHONY: pickle 70 | pickle: 71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 72 | @echo 73 | @echo "Build finished; now you can process the pickle files." 74 | 75 | .PHONY: json 76 | json: 77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 78 | @echo 79 | @echo "Build finished; now you can process the JSON files." 80 | 81 | .PHONY: htmlhelp 82 | htmlhelp: 83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 84 | @echo 85 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 86 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
87 | 88 | .PHONY: qthelp 89 | qthelp: 90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 91 | @echo 92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiocache.qhcp" 95 | @echo "To view the help file:" 96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiocache.qhc" 97 | 98 | .PHONY: applehelp 99 | applehelp: 100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 101 | @echo 102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 103 | @echo "N.B. You won't be able to view it unless you put it in" \ 104 | "~/Library/Documentation/Help or install it in your application" \ 105 | "bundle." 106 | 107 | .PHONY: devhelp 108 | devhelp: 109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 110 | @echo 111 | @echo "Build finished." 112 | @echo "To view the help file:" 113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/aiocache" 114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiocache" 115 | @echo "# devhelp" 116 | 117 | .PHONY: epub 118 | epub: 119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 120 | @echo 121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 122 | 123 | .PHONY: epub3 124 | epub3: 125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 126 | @echo 127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 128 | 129 | .PHONY: latex 130 | latex: 131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 132 | @echo 133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 135 | "(use \`make latexpdf' here to do that automatically)." 136 | 137 | .PHONY: latexpdf 138 | latexpdf: 139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 140 | @echo "Running LaTeX files through pdflatex..." 141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 143 | 144 | .PHONY: latexpdfja 145 | latexpdfja: 146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 147 | @echo "Running LaTeX files through platex and dvipdfmx..." 148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 150 | 151 | .PHONY: text 152 | text: 153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 154 | @echo 155 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 156 | 157 | .PHONY: man 158 | man: 159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 160 | @echo 161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 162 | 163 | .PHONY: texinfo 164 | texinfo: 165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 166 | @echo 167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 168 | @echo "Run \`make' in that directory to run these through makeinfo" \ 169 | "(use \`make info' here to do that automatically)." 170 | 171 | .PHONY: info 172 | info: 173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 174 | @echo "Running Texinfo files through makeinfo..." 175 | make -C $(BUILDDIR)/texinfo info 176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 177 | 178 | .PHONY: gettext 179 | gettext: 180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 181 | @echo 182 | @echo "Build finished. 
The message catalogs are in $(BUILDDIR)/locale." 183 | 184 | .PHONY: changes 185 | changes: 186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 187 | @echo 188 | @echo "The overview file is in $(BUILDDIR)/changes." 189 | 190 | .PHONY: linkcheck 191 | linkcheck: 192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 193 | @echo 194 | @echo "Link check complete; look for any errors in the above output " \ 195 | "or in $(BUILDDIR)/linkcheck/output.txt." 196 | 197 | .PHONY: doctest 198 | doctest: 199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 200 | @echo "Testing of doctests in the sources finished, look at the " \ 201 | "results in $(BUILDDIR)/doctest/output.txt." 202 | 203 | .PHONY: coverage 204 | coverage: 205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 206 | @echo "Testing of coverage in the sources finished, look at the " \ 207 | "results in $(BUILDDIR)/coverage/python.txt." 208 | 209 | .PHONY: xml 210 | xml: 211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 212 | @echo 213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 214 | 215 | .PHONY: pseudoxml 216 | pseudoxml: 217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 218 | @echo 219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 220 | 221 | .PHONY: dummy 222 | dummy: 223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 224 | @echo 225 | @echo "Build finished. Dummy builder generates no files." 226 | -------------------------------------------------------------------------------- /docs/caches.rst: -------------------------------------------------------------------------------- 1 | .. _caches: 2 | 3 | Caches 4 | ====== 5 | 6 | You can use different caches according to your needs. All the caches implement the same interface. 7 | 8 | Caches always work together with a serializer, which transforms data when storing to and retrieving from the backend. A cache may also contain plugins that enrich its behavior (like adding metrics, logs, etc.). 9 | 10 | This is the flow of the ``set`` command: 11 | 12 | .. image:: images/set_operation_flow.png 13 | :align: center 14 | 15 | Let's go with a more specific case. Let's pick Redis as the cache with namespace "test" and PickleSerializer as the serializer: 16 | 17 | #. We receive ``set("key", "value")``. 18 | #. Hook ``pre_set`` of all attached plugins (none by default) is called. 19 | #. "key" will become "test:key" when calling ``build_key``. 20 | #. "value" will become an array of bytes when calling ``serializer.dumps`` because of ``PickleSerializer``. 21 | #. The byte array is stored together with the key using the ``set`` command in Redis. 22 | #. Hook ``post_set`` of all attached plugins is called. 23 | 24 | By default, all commands are covered by a timeout that triggers an ``asyncio.TimeoutError`` when exceeded. The timeout can be set at instance level or when calling the command. 25 | 26 | The supported commands are: 27 | 28 | - add 29 | - get 30 | - set 31 | - multi_get 32 | - multi_set 33 | - delete 34 | - exists 35 | - increment 36 | - expire 37 | - clear 38 | - raw 39 | 40 | If you feel a command is missing here, do not hesitate to `open an issue <https://github.com/aio-libs/aiocache/issues>`_. 41 | 42 | 43 | .. _basecache: 44 | 45 | BaseCache 46 | --------- 47 | 48 | .. autoclass:: aiocache.base.BaseCache 49 | :members: 50 | 51 | 52 | .. _rediscache: 53 | 54 | RedisCache 55 | ---------- 56 | 57 | ..
autoclass:: aiocache.backends.redis.RedisCache 58 | :members: 59 | 60 | 61 | .. _simplememorycache: 62 | 63 | SimpleMemoryCache 64 | ----------------- 65 | 66 | .. autoclass:: aiocache.SimpleMemoryCache 67 | :members: 68 | 69 | 70 | .. _memcachedcache: 71 | 72 | MemcachedCache 73 | -------------- 74 | 75 | .. autoclass:: aiocache.backends.memcached.MemcachedCache 76 | :members: 77 | 78 | 79 | .. _dynamodbcache: 80 | 81 | Third-party caches 82 | ================== 83 | 84 | Additional cache backends are available through other libraries. 85 | 86 | DynamoDBCache 87 | ------------- 88 | 89 | `aiocache-dynamodb `_ provides support for DynamoDB. 90 | 91 | .. autoclass:: aiocache_dynamodb.DynamoDBCache 92 | :members: 93 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # aiocache documentation build configuration file, created by 5 | # sphinx-quickstart on Sat Oct 1 16:53:45 2016. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | # 20 | import re 21 | import os 22 | import sys 23 | from pathlib import Path 24 | 25 | sys.path.insert(0, os.path.abspath("..")) 26 | sys.path.insert(0, os.path.abspath(".")) 27 | 28 | # -- General configuration ------------------------------------------------ 29 | 30 | # If your documentation needs a minimal Sphinx version, state it here. 31 | # 32 | # needs_sphinx = '1.0' 33 | 34 | # Add any Sphinx extension module names here, as strings. They can be 35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 36 | # ones. 37 | extensions = [ 38 | "sphinx.ext.autodoc", 39 | "sphinx.ext.viewcode", 40 | ] 41 | 42 | # Add any paths that contain templates here, relative to this directory. 43 | templates_path = ["_templates"] 44 | 45 | # The suffix(es) of source filenames. 46 | # You can specify multiple suffix as a list of string: 47 | # 48 | # source_suffix = ['.rst', '.md'] 49 | source_suffix = ".rst" 50 | 51 | # The encoding of source files. 52 | # 53 | # source_encoding = 'utf-8-sig' 54 | 55 | # The master toctree document. 56 | master_doc = "index" 57 | 58 | # General information about the project. 59 | project = "aiocache" 60 | copyright = "2016, Manuel Miranda" 61 | author = "Manuel Miranda" 62 | 63 | # The version info for the project you're documenting, acts as replacement for 64 | # |version| and |release|, also used in various other places throughout the 65 | # built documents. 66 | # 67 | 68 | _path = Path(__file__).parent.parent / "aiocache/__init__.py" 69 | try: 70 | version = re.findall(r'__version__ = "(.+?)"', _path.read_text())[0] 71 | release = version 72 | except IndexError: 73 | raise RuntimeError("Unable to determine version.") 74 | 75 | # The language for content autogenerated by Sphinx. Refer to documentation 76 | # for a list of supported languages. 
77 | # 78 | # This is also used if you do content translation via gettext catalogs. 79 | # Usually you set "language" from the command line for these cases. 80 | language = None 81 | 82 | # There are two options for replacing |today|: either, you set today to some 83 | # non-false value, then it is used: 84 | # 85 | # today = '' 86 | # 87 | # Else, today_fmt is used as the format for a strftime call. 88 | # 89 | # today_fmt = '%B %d, %Y' 90 | 91 | # List of patterns, relative to source directory, that match files and 92 | # directories to ignore when looking for source files. 93 | # This patterns also effect to html_static_path and html_extra_path 94 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 95 | 96 | # The reST default role (used for this markup: `text`) to use for all 97 | # documents. 98 | # 99 | # default_role = None 100 | 101 | # If true, '()' will be appended to :func: etc. cross-reference text. 102 | # 103 | # add_function_parentheses = True 104 | 105 | # If true, the current module name will be prepended to all description 106 | # unit titles (such as .. function::). 107 | # 108 | # add_module_names = True 109 | 110 | # If true, sectionauthor and moduleauthor directives will be shown in the 111 | # output. They are ignored by default. 112 | # 113 | # show_authors = False 114 | 115 | # The name of the Pygments (syntax highlighting) style to use. 116 | pygments_style = "sphinx" 117 | 118 | # A list of ignored prefixes for module index sorting. 119 | # modindex_common_prefix = [] 120 | 121 | # If true, keep warnings as "system message" paragraphs in the built documents. 122 | # keep_warnings = False 123 | 124 | # If true, `todo` and `todoList` produce output, else they produce nothing. 125 | todo_include_todos = False 126 | 127 | 128 | # -- Options for HTML output ---------------------------------------------- 129 | 130 | # The theme to use for HTML and HTML Help pages. See the documentation for 131 | # a list of builtin themes. 132 | # 133 | html_theme = "default" 134 | on_rtd = os.environ.get("READTHEDOCS", None) == "True" 135 | if not on_rtd: 136 | import sphinx_rtd_theme 137 | html_theme = "sphinx_rtd_theme" 138 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 139 | 140 | # Theme options are theme-specific and customize the look and feel of a theme 141 | # further. For a list of options available for each theme, see the 142 | # documentation. 143 | # 144 | # html_theme_options = {} 145 | 146 | # Add any paths that contain custom themes here, relative to this directory. 147 | # html_theme_path = [] 148 | 149 | # The name for this set of Sphinx documents. 150 | # " v documentation" by default. 151 | # 152 | # html_title = 'aiocache v0.0.1' 153 | 154 | # A shorter title for the navigation bar. Default is the same as html_title. 155 | # 156 | # html_short_title = None 157 | 158 | # The name of an image file (relative to this directory) to place at the top 159 | # of the sidebar. 160 | # 161 | # html_logo = None 162 | 163 | # The name of an image file (relative to this directory) to use as a favicon of 164 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 165 | # pixels large. 166 | # 167 | # html_favicon = None 168 | 169 | # Add any paths that contain custom static files (such as style sheets) here, 170 | # relative to this directory. They are copied after the builtin static files, 171 | # so a file named "default.css" will overwrite the builtin "default.css". 
172 | html_static_path = ["_static"] 173 | 174 | # Add any extra paths that contain custom files (such as robots.txt or 175 | # .htaccess) here, relative to this directory. These files are copied 176 | # directly to the root of the documentation. 177 | # 178 | # html_extra_path = [] 179 | 180 | # If not None, a 'Last updated on:' timestamp is inserted at every page 181 | # bottom, using the given strftime format. 182 | # The empty string is equivalent to '%b %d, %Y'. 183 | # 184 | # html_last_updated_fmt = None 185 | 186 | # If true, SmartyPants will be used to convert quotes and dashes to 187 | # typographically correct entities. 188 | # 189 | # html_use_smartypants = True 190 | 191 | # Custom sidebar templates, maps document names to template names. 192 | # 193 | # html_sidebars = {} 194 | 195 | # Additional templates that should be rendered to pages, maps page names to 196 | # template names. 197 | # 198 | # html_additional_pages = {} 199 | 200 | # If false, no module index is generated. 201 | # 202 | # html_domain_indices = True 203 | 204 | # If false, no index is generated. 205 | # 206 | # html_use_index = True 207 | 208 | # If true, the index is split into individual pages for each letter. 209 | # 210 | # html_split_index = False 211 | 212 | # If true, links to the reST sources are added to the pages. 213 | # 214 | # html_show_sourcelink = True 215 | 216 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 217 | # 218 | # html_show_sphinx = True 219 | 220 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 221 | # 222 | # html_show_copyright = True 223 | 224 | # If true, an OpenSearch description file will be output, and all pages will 225 | # contain a tag referring to it. The value of this option must be the 226 | # base URL from which the finished HTML is served. 227 | # 228 | # html_use_opensearch = '' 229 | 230 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 231 | # html_file_suffix = None 232 | 233 | # Language to be used for generating the HTML full-text search index. 234 | # Sphinx supports the following languages: 235 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' 236 | # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' 237 | # 238 | # html_search_language = 'en' 239 | 240 | # A dictionary with options for the search language support, empty by default. 241 | # 'ja' uses this config value. 242 | # 'zh' user can custom change `jieba` dictionary path. 243 | # 244 | # html_search_options = {'type': 'default'} 245 | 246 | # The name of a javascript file (relative to the configuration directory) that 247 | # implements a search results scorer. If empty, the default will be used. 248 | # 249 | # html_search_scorer = 'scorer.js' 250 | 251 | # Output file base name for HTML help builder. 252 | htmlhelp_basename = "aiocachedoc" 253 | 254 | # -- Options for LaTeX output --------------------------------------------- 255 | 256 | latex_elements = { 257 | # The paper size ('letterpaper' or 'a4paper'). 258 | # 259 | # 'papersize': 'letterpaper', 260 | 261 | # The font size ('10pt', '11pt' or '12pt'). 262 | # 263 | # 'pointsize': '10pt', 264 | 265 | # Additional stuff for the LaTeX preamble. 266 | # 267 | # 'preamble': '', 268 | 269 | # Latex figure (float) alignment 270 | # 271 | # 'figure_align': 'htbp', 272 | } 273 | 274 | # Grouping the document tree into LaTeX files. List of tuples 275 | # (source start file, target name, title, 276 | # author, documentclass [howto, manual, or own class]). 
277 | latex_documents = [ 278 | (master_doc, "aiocache.tex", "aiocache Documentation", "Manuel Miranda", "manual"), 279 | ] 280 | 281 | # The name of an image file (relative to this directory) to place at the top of 282 | # the title page. 283 | # 284 | # latex_logo = None 285 | 286 | # For "manual" documents, if this is true, then toplevel headings are parts, 287 | # not chapters. 288 | # 289 | # latex_use_parts = False 290 | 291 | # If true, show page references after internal links. 292 | # 293 | # latex_show_pagerefs = False 294 | 295 | # If true, show URL addresses after external links. 296 | # 297 | # latex_show_urls = False 298 | 299 | # Documents to append as an appendix to all manuals. 300 | # 301 | # latex_appendices = [] 302 | 303 | # It false, will not define \strong, \code, itleref, \crossref ... but only 304 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added 305 | # packages. 306 | # 307 | # latex_keep_old_macro_names = True 308 | 309 | # If false, no module index is generated. 310 | # 311 | # latex_domain_indices = True 312 | 313 | 314 | # -- Options for manual page output --------------------------------------- 315 | 316 | # One entry per manual page. List of tuples 317 | # (source start file, name, description, authors, manual section). 318 | man_pages = [(master_doc, "aiocache", "aiocache Documentation", [author], 1)] 319 | 320 | # If true, show URL addresses after external links. 321 | # 322 | # man_show_urls = False 323 | 324 | 325 | # -- Options for Texinfo output ------------------------------------------- 326 | 327 | # Grouping the document tree into Texinfo files. List of tuples 328 | # (source start file, target name, title, author, 329 | # dir menu entry, description, category) 330 | texinfo_documents = [ 331 | ( 332 | master_doc, 333 | "aiocache", 334 | "aiocache Documentation", 335 | author, 336 | "aiocache", 337 | "One line description of project.", 338 | "Miscellaneous" 339 | ), 340 | ] 341 | 342 | # Documents to append as an appendix to all manuals. 343 | # 344 | # texinfo_appendices = [] 345 | 346 | # If false, no module index is generated. 347 | # 348 | # texinfo_domain_indices = True 349 | 350 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 351 | # 352 | # texinfo_show_urls = 'footnote' 353 | 354 | # If true, do not generate a @detailmenu in the "Top" node's menu. 355 | # 356 | # texinfo_no_detailmenu = False 357 | -------------------------------------------------------------------------------- /docs/decorators.rst: -------------------------------------------------------------------------------- 1 | .. _decorators: 2 | 3 | Decorators 4 | ========== 5 | 6 | aiocache comes with a couple of decorators for caching results from asynchronous functions. Do not use the decorator in synchronous functions, it may lead to unexpected behavior. 7 | 8 | .. _cached: 9 | 10 | cached 11 | ------ 12 | 13 | .. automodule:: aiocache 14 | :members: cached 15 | 16 | .. literalinclude:: ../examples/cached_decorator.py 17 | :language: python 18 | :linenos: 19 | 20 | .. _multi_cached: 21 | 22 | multi_cached 23 | ------------ 24 | 25 | .. automodule:: aiocache 26 | :members: multi_cached 27 | 28 | .. 
literalinclude:: ../examples/multicached_decorator.py 29 | :language: python 30 | :linenos: 31 | -------------------------------------------------------------------------------- /docs/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/docs/images/architecture.png -------------------------------------------------------------------------------- /docs/images/set_operation_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/docs/images/set_operation_flow.png -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. aiocache documentation master file, created by 2 | sphinx-quickstart on Sat Oct 1 16:53:45 2016. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to aiocache's documentation! 7 | ==================================== 8 | 9 | 10 | Installing 11 | ---------- 12 | 13 | - ``pip install aiocache`` 14 | - ``pip install aiocache[redis]`` 15 | - ``pip install aiocache[memcached]`` 16 | - ``pip install aiocache[redis,memcached]`` 17 | 18 | 19 | Usage 20 | ----- 21 | 22 | Using a cache is as simple as 23 | 24 | .. code-block:: python 25 | 26 | >>> import asyncio 27 | >>> from aiocache import SimpleMemoryCache 28 | >>> cache = SimpleMemoryCache() 29 | >>> with asyncio.Runner() as runner: 30 | ...     runner.run(cache.set("key", "value")) 31 | True 32 | ...     runner.run(cache.get("key")) 33 | 'value' 34 | 35 | Here we are using the :ref:`simplememorycache` but you can use any other supported backends as listed in :ref:`caches`. 36 | All caches contain the same minimum interface which consists of the following functions: 37 | 38 | - ``add``: Only adds key/value if key does not exist. Otherwise raises ValueError. 39 | - ``get``: Retrieves the value identified by key. 40 | - ``set``: Sets key/value. 41 | - ``multi_get``: Retrieves multiple key/values. 42 | - ``multi_set``: Sets multiple key/values. 43 | - ``exists``: Returns True if key exists, False otherwise. 44 | - ``increment``: Increments the value stored in the given key. 45 | - ``delete``: Deletes key and returns number of deleted items. 46 | - ``clear``: Clears the items stored. 47 | - ``raw``: Executes the specified command using the underlying client. 48 | 49 | See the `examples folder <https://github.com/aio-libs/aiocache/tree/master/examples>`_ for different use cases: 50 | 51 | - `Sanic, Aiohttp and Tornado <https://github.com/aio-libs/aiocache/tree/master/examples/frameworks>`_ 52 | - `Python object in Redis <https://github.com/aio-libs/aiocache/blob/master/examples/python_object.py>`_ 53 | - `Custom serializer for compressing data <https://github.com/aio-libs/aiocache/blob/master/examples/serializer_class.py>`_ 54 | - `TimingPlugin and HitMissRatioPlugin demos <https://github.com/aio-libs/aiocache/blob/master/examples/plugins.py>`_ 55 | - `Using marshmallow as a serializer <https://github.com/aio-libs/aiocache/blob/master/examples/marshmallow_serializer_class.py>`_ 56 | - `Using cached decorator <https://github.com/aio-libs/aiocache/blob/master/examples/cached_decorator.py>`_. 57 | - `Using multi_cached decorator <https://github.com/aio-libs/aiocache/blob/master/examples/multicached_decorator.py>`_. 58 | 59 | 60 | Contents 61 | -------- 62 | 63 | .. toctree:: 64 | 65 | caches 66 | serializers 67 | plugins 68 | decorators 69 | locking 70 | testing 71 | v1_migration 72 | 73 | Indices and tables 74 | ================== 75 | 76 | * :ref:`genindex` 77 | * :ref:`modindex` 78 | * :ref:`search` 79 | -------------------------------------------------------------------------------- /docs/locking.rst: -------------------------------------------------------------------------------- 1 | .. _locking: 2 | 3 | ..
WARNING:: 4 | This was added in version 0.7.0 and the API is new. This means it's open to breaking changes in future versions until the API is considered stable. 5 | 6 | 7 | Locking 8 | ======= 9 | 10 | 11 | .. WARNING:: 12 | The implementations provided are **NOT** intended for consistency/synchronization purposes. If you need a locking mechanism focused on consistency, consider implementing your mechanism based on more robust tools like https://zookeeper.apache.org/. 13 | 14 | 15 | There are a couple of locking implementations that can help you protect against different scenarios: 16 | 17 | 18 | .. _redlock: 19 | 20 | RedLock 21 | ------- 22 | 23 | .. autoclass:: aiocache.lock.RedLock 24 | :members: 25 | 26 | 27 | .. _optimisticlock: 28 | 29 | OptimisticLock 30 | -------------- 31 | 32 | .. autoclass:: aiocache.lock.OptimisticLock 33 | :members: 34 | -------------------------------------------------------------------------------- /docs/plugins.rst: -------------------------------------------------------------------------------- 1 | .. _plugins: 2 | 3 | Plugins 4 | ======= 5 | 6 | Plugins can be used to enrich the behavior of the cache. By default all caches are configured without any plugins, but you can add new ones in the constructor or after initializing the cache class:: 7 | 8 | >>> from aiocache import SimpleMemoryCache 9 | >>> from aiocache.plugins import HitMissRatioPlugin, TimingPlugin 10 | >>> cache = SimpleMemoryCache(plugins=[HitMissRatioPlugin()]) 11 | >>> cache.plugins += [TimingPlugin()] 12 | 13 | You can define your custom plugin by inheriting from `BasePlugin`_ and overriding the needed methods (the overrides NEED to be async). All commands have ``pre_`` and ``post_`` hooks. 14 | 15 | .. WARNING:: 16 | Both pre and post hooks are executed by awaiting the coroutine. If you perform expensive operations in the hooks, you will add more latency to the command being executed and thus a higher probability of raising a timeout error. If a timeout error is raised, be aware that previous actions **won't be rolled back**. 17 | 18 | A complete example of using plugins: 19 | 20 | .. literalinclude:: ../examples/plugins.py 21 | :language: python 22 | :linenos: 23 | 24 | 25 | .. _baseplugin: 26 | 27 | BasePlugin 28 | ---------- 29 | 30 | .. autoclass:: aiocache.plugins.BasePlugin 31 | :members: 32 | :undoc-members: 33 | 34 | .. _timingplugin: 35 | 36 | TimingPlugin 37 | ------------ 38 | 39 | .. autoclass:: aiocache.plugins.TimingPlugin 40 | :members: 41 | :undoc-members: 42 | 43 | .. _hitmissratioplugin: 44 | 45 | HitMissRatioPlugin 46 | ------------------ 47 | 48 | .. autoclass:: aiocache.plugins.HitMissRatioPlugin 49 | :members: 50 | :undoc-members: 51 | -------------------------------------------------------------------------------- /docs/readthedocs.yml: -------------------------------------------------------------------------------- 1 | formats: 2 | - none 3 | 4 | build: 5 | image: latest 6 | 7 | python: 8 | version: 3.11 9 | pip_install: true 10 | extra_requirements: 11 | - redis 12 | - memcached 13 | - msgpack 14 | -------------------------------------------------------------------------------- /docs/serializers.rst: -------------------------------------------------------------------------------- 1 | .. _serializers: 2 | 3 | Serializers 4 | =========== 5 | 6 | Serializers can be attached to backends in order to serialize/deserialize data sent to and retrieved from the backend. This allows you to apply transformations to data in case you want it saved in a specific format in your cache backend.
For example, imagine you have your ``Model`` and want to serialize it to something that Redis can understand (Redis can't store python objects). This is the task of a serializer. 7 | 8 | To use a specific serializer:: 9 | 10 | >>> from aiocache import SimpleMemoryCache 11 | >>> from aiocache.serializers import PickleSerializer 12 | >>> cache = SimpleMemoryCache(serializer=PickleSerializer()) 13 | 14 | Currently the following are built in: 15 | 16 | 17 | .. _nullserializer: 18 | 19 | NullSerializer 20 | -------------- 21 | .. autoclass:: aiocache.serializers.NullSerializer 22 | :members: 23 | 24 | 25 | .. _stringserializer: 26 | 27 | StringSerializer 28 | ---------------- 29 | 30 | .. autoclass:: aiocache.serializers.StringSerializer 31 | :members: 32 | 33 | .. _pickleserializer: 34 | 35 | PickleSerializer 36 | ---------------- 37 | 38 | .. autoclass:: aiocache.serializers.PickleSerializer 39 | :members: 40 | 41 | .. _jsonserializer: 42 | 43 | JsonSerializer 44 | -------------- 45 | 46 | .. autoclass:: aiocache.serializers.JsonSerializer 47 | :members: 48 | 49 | .. _msgpackserializer: 50 | 51 | MsgPackSerializer 52 | ----------------- 53 | 54 | .. autoclass:: aiocache.serializers.MsgPackSerializer 55 | :members: 56 | 57 | If the current serializers don't cover your needs, you can always define your own custom serializer, as shown in ``examples/serializer_class.py``: 58 | 59 | .. literalinclude:: ../examples/serializer_class.py 60 | :language: python 61 | :linenos: 62 | 63 | You can also use marshmallow as your serializer (``examples/marshmallow_serializer_class.py``): 64 | 65 | .. literalinclude:: ../examples/marshmallow_serializer_class.py 66 | :language: python 67 | :linenos: 68 | 69 | By default cache backends assume they are working with ``str`` types. If your custom implementation transforms data to bytes, you will need to set the class attribute ``DEFAULT_ENCODING`` to ``None``. 70 | -------------------------------------------------------------------------------- /docs/testing.rst: -------------------------------------------------------------------------------- 1 | Testing 2 | ======= 3 | 4 | It's really easy to cut the dependency on aiocache functionality: 5 | 6 | .. literalinclude:: ../examples/testing.py 7 | 8 | Note that we are passing the :ref:`basecache` as the spec for the Mock. 9 | 10 | Also, for debugging purposes, you can use `AIOCACHE_DISABLE=1 python myscript.py` to disable caching. 11 | -------------------------------------------------------------------------------- /docs/v1_migration.rst: -------------------------------------------------------------------------------- 1 | .. _v1_migration: 2 | 3 | Migrating from v0.x to v1 4 | ========================= 5 | 6 | The v1 release of aiocache is a major release that introduces several breaking changes. 7 | 8 | Changes to Cache Instantiation 9 | ------------------------------ 10 | 11 | The abstraction and factories around cache instantiation have been removed in favor of a more direct approach. 12 | 13 | * The `aiocache.Cache` class has been removed. Instead, use the specific cache class directly. For example, use `aiocache.RedisCache` instead of `aiocache.Cache.REDIS`. 14 | * Caches should be fully instantiated when passed to decorators, rather than being instantiated with a factory function. 15 | * Cache aliases have been removed. Create an instance of the cache class directly instead, as in the sketch below.
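A minimal before/after sketch of the instantiation change (the v0.x line follows the removed factory API shown in older docstrings; the v1 line mirrors this repository's examples):

.. code-block:: python

    # v0.x (removed): factory-based instantiation
    from aiocache import Cache
    cache = Cache(Cache.REDIS)

    # v1: instantiate the concrete cache class directly,
    # passing an active redis client explicitly
    import redis.asyncio as redis
    from aiocache import RedisCache
    cache = RedisCache(namespace="main", client=redis.Redis())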
16 | -------------------------------------------------------------------------------- /examples/cached_decorator.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from collections import namedtuple 4 | import redis.asyncio as redis 5 | 6 | from aiocache import cached 7 | from aiocache import RedisCache 8 | from aiocache.serializers import PickleSerializer 9 | 10 | Result = namedtuple('Result', "content, status") 11 | 12 | cache = RedisCache(namespace="main", client=redis.Redis(), serializer=PickleSerializer()) 13 | 14 | 15 | @cached(cache, ttl=10, key_builder=lambda *args, **kw: "key") 16 | async def cached_call(): 17 | return Result("content", 200) 18 | 19 | 20 | async def test_cached(): 21 | async with cache: 22 | await cached_call() 23 | exists = await cache.exists("key") 24 | assert exists is True 25 | await cache.delete("key") 26 | 27 | 28 | if __name__ == "__main__": 29 | asyncio.run(test_cached()) 30 | -------------------------------------------------------------------------------- /examples/frameworks/aiohttp_example.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from datetime import datetime 4 | from aiohttp import web 5 | from aiocache import cached, SimpleMemoryCache 6 | from aiocache.serializers import JsonSerializer 7 | 8 | cache = SimpleMemoryCache(serializer=JsonSerializer()) 9 | 10 | 11 | @cached(cache, key_builder=lambda x: "time") 12 | async def time(): 13 | return {"time": datetime.now().isoformat()} 14 | 15 | 16 | async def handle(request): 17 | return web.json_response(await time()) 18 | 19 | 20 | # It is also possible to cache the whole route, but for this you will need to 21 | # override `cached.get_from_cache` and regenerate the response since aiohttp 22 | # forbids reusing responses 23 | class CachedOverride(cached): 24 | def __init__(self, *args, **kwargs): 25 | super().__init__(*args, **kwargs) 26 | 27 | async def get_from_cache(self, key): 28 | try: 29 | value = await self.cache.get(key) 30 | if type(value) is web.Response: 31 | return web.Response( 32 | body=value.body, 33 | status=value.status, 34 | reason=value.reason, 35 | headers=value.headers, 36 | ) 37 | return value 38 | except Exception: 39 | logging.exception("Couldn't retrieve %s, unexpected error", key) 40 | return None 41 | 42 | 43 | @CachedOverride(cache, key_builder="route") 44 | async def handle2(request): 45 | return web.json_response(await asyncio.sleep(3)) 46 | 47 | 48 | if __name__ == "__main__": 49 | app = web.Application() 50 | app.router.add_get('/handle', handle) 51 | app.router.add_get('/handle2', handle2) 52 | 53 | web.run_app(app) 54 | -------------------------------------------------------------------------------- /examples/frameworks/sanic_example.py: -------------------------------------------------------------------------------- 1 | """ 2 | Example of caching using aiocache package: 3 | 4 | /: Does a 3 seconds sleep. 
Only the first time, because it's using the `cached` decorator 5 | /reuse: Returns the data stored by the "main" endpoint 6 | """ 7 | 8 | import asyncio 9 | 10 | from sanic import Sanic 11 | from sanic.response import json 12 | from sanic.log import logger 13 | from aiocache import cached, SimpleMemoryCache 14 | from aiocache.serializers import JsonSerializer 15 | 16 | app = Sanic(__name__) 17 | 18 | 19 | @cached(SimpleMemoryCache(), key_builder=lambda x: "my_custom_key") 20 | async def expensive_call(): 21 | logger.info("Expensive has been called") 22 | await asyncio.sleep(3) 23 | return {"test": True} 24 | 25 | 26 | async def reuse_data(): 27 | cache = SimpleMemoryCache(serializer=JsonSerializer()) # Not ideal to define here 28 | data = await cache.get("my_custom_key") # Note the key is defined in `cached` decorator 29 | return data 30 | 31 | 32 | @app.route("/") 33 | async def main(request): 34 | logger.info("Received GET /") 35 | return json(await expensive_call()) 36 | 37 | 38 | @app.route("/reuse") 39 | async def reuse(request): 40 | logger.info("Received GET /reuse") 41 | return json(await reuse_data()) 42 | 43 | 44 | app.run(host="0.0.0.0", port=8000) 45 | -------------------------------------------------------------------------------- /examples/frameworks/tornado_example.py: -------------------------------------------------------------------------------- 1 | import tornado.web 2 | import tornado.ioloop 3 | from datetime import datetime 4 | from aiocache import cached, SimpleMemoryCache 5 | from aiocache.serializers import JsonSerializer 6 | 7 | 8 | class MainHandler(tornado.web.RequestHandler): 9 | 10 | # Due to some incompatibilities between tornado and asyncio, caches can't use the "timeout" feature; 11 | # to make it work, you always have to set it explicitly to 0 12 | @cached(SimpleMemoryCache(serializer=JsonSerializer(), timeout=0), key_builder=lambda x: "my_custom_key") 13 | async def time(self): 14 | return {"time": datetime.now().isoformat()} 15 | 16 | async def get(self): 17 | self.write(await self.time()) 18 | 19 | 20 | if __name__ == "__main__": 21 | tornado.ioloop.IOLoop.configure('tornado.platform.asyncio.AsyncIOLoop') 22 | app = tornado.web.Application([(r"/", MainHandler)]) 23 | app.listen(8888) 24 | tornado.ioloop.IOLoop.current().start() 25 | -------------------------------------------------------------------------------- /examples/marshmallow_serializer_class.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | import asyncio 4 | from typing import Any 5 | 6 | from marshmallow import fields, Schema, post_load 7 | 8 | from aiocache import SimpleMemoryCache 9 | from aiocache.serializers import BaseSerializer 10 | 11 | 12 | class RandomModel: 13 | MY_CONSTANT = "CONSTANT" 14 | 15 | def __init__(self, int_type=None, str_type=None, dict_type=None, list_type=None): 16 | self.int_type = int_type or random.randint(1, 10) 17 | self.str_type = str_type or random.choice(string.ascii_lowercase) 18 | self.dict_type = dict_type or {} 19 | self.list_type = list_type or [] 20 | 21 | def __eq__(self, obj): 22 | return self.__dict__ == obj.__dict__ 23 | 24 | 25 | class RandomSchema(Schema): 26 | int_type = fields.Integer() 27 | str_type = fields.String() 28 | dict_type = fields.Dict() 29 | list_type = fields.List(fields.Integer()) 30 | 31 | @post_load 32 | def build_my_type(self, data, **kwargs): 33 | return RandomModel(**data) 34 | 35 | class Meta: 36 | strict = True 37 | 38 | 39 | class
MarshmallowSerializer(BaseSerializer): 40 | def __init__(self, *args: Any, **kwargs: Any): 41 | super().__init__(*args, **kwargs) 42 | self.schema = RandomSchema() 43 | 44 | def dumps(self, value: Any) -> str: 45 | return self.schema.dumps(value) 46 | 47 | def loads(self, value: str) -> Any: 48 | return self.schema.loads(value) 49 | 50 | 51 | cache = SimpleMemoryCache(serializer=MarshmallowSerializer(), namespace="main") 52 | 53 | 54 | async def serializer(): 55 | model = RandomModel() 56 | await cache.set("key", model) 57 | 58 | result = await cache.get("key") 59 | 60 | assert result.int_type == model.int_type 61 | assert result.str_type == model.str_type 62 | assert result.dict_type == model.dict_type 63 | assert result.list_type == model.list_type 64 | 65 | 66 | async def test_serializer(): 67 | await serializer() 68 | await cache.delete("key") 69 | 70 | 71 | if __name__ == "__main__": 72 | asyncio.run(test_serializer()) 73 | -------------------------------------------------------------------------------- /examples/multicached_decorator.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import redis.asyncio as redis 4 | 5 | from aiocache import multi_cached 6 | from aiocache import RedisCache 7 | 8 | DICT = { 9 | 'a': "Z", 10 | 'b': "Y", 11 | 'c': "X", 12 | 'd': "W" 13 | } 14 | 15 | cache = RedisCache(namespace="main", client=redis.Redis()) 16 | 17 | 18 | @multi_cached(cache, keys_from_attr="ids") 19 | async def multi_cached_ids(ids=None): 20 | return {id_: DICT[id_] for id_ in ids} 21 | 22 | 23 | @multi_cached(cache, keys_from_attr="keys") 24 | async def multi_cached_keys(keys=None): 25 | return {id_: DICT[id_] for id_ in keys} 26 | 27 | 28 | async def test_multi_cached(): 29 | await multi_cached_ids(ids=("a", "b")) 30 | await multi_cached_ids(ids=("a", "c")) 31 | await multi_cached_keys(keys=("d",)) 32 | 33 | assert await cache.exists("a") 34 | assert await cache.exists("b") 35 | assert await cache.exists("c") 36 | assert await cache.exists("d") 37 | 38 | await cache.delete("a") 39 | await cache.delete("b") 40 | await cache.delete("c") 41 | await cache.delete("d") 42 | await cache.close() 43 | 44 | 45 | if __name__ == "__main__": 46 | asyncio.run(test_multi_cached()) 47 | -------------------------------------------------------------------------------- /examples/optimistic_lock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import random 4 | 5 | import redis.asyncio as redis 6 | from aiocache import RedisCache 7 | from aiocache.lock import OptimisticLock, OptimisticLockError 8 | 9 | logger = logging.getLogger(__name__) 10 | cache = RedisCache(namespace="main", client=redis.Redis()) 11 | 12 | 13 | async def expensive_function(): 14 | logger.warning('Expensive is being executed...') 15 | await asyncio.sleep(random.uniform(0, 2)) 16 | return 'result' 17 | 18 | 19 | async def my_view(): 20 | 21 | async with OptimisticLock(cache, 'key') as lock: 22 | result = await expensive_function() 23 | try: 24 | await lock.cas(result) 25 | except OptimisticLockError: 26 | logger.warning( 27 | 'I failed setting the value because it is different since the lock started!') 28 | return result 29 | 30 | 31 | async def concurrent(): 32 | await cache.set('key', 'initial_value') 33 | # All three calls will read 'initial_value' as the value to check and only 34 | # the first one finishing will succeed because the others, when trying to set 35 | # the value, will see that 
the value is not the same as when the lock started 36 | await asyncio.gather(my_view(), my_view(), my_view()) 37 | 38 | 39 | async def test_redis(): 40 | await concurrent() 41 | await cache.delete("key") 42 | await cache.close() 43 | 44 | 45 | if __name__ == '__main__': 46 | asyncio.run(test_redis()) 47 | -------------------------------------------------------------------------------- /examples/plugins.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | import logging 4 | 5 | from aiocache import SimpleMemoryCache 6 | from aiocache.plugins import HitMissRatioPlugin, TimingPlugin, BasePlugin 7 | 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | class MyCustomPlugin(BasePlugin): 13 | 14 | async def pre_set(self, *args, **kwargs): 15 | logger.info("I'm the pre_set hook being called with %s %s" % (args, kwargs)) 16 | 17 | async def post_set(self, *args, **kwargs): 18 | logger.info("I'm the post_set hook being called with %s %s" % (args, kwargs)) 19 | 20 | 21 | cache = SimpleMemoryCache( 22 | plugins=[HitMissRatioPlugin(), TimingPlugin(), MyCustomPlugin()], 23 | namespace="main") 24 | 25 | 26 | async def run(): 27 | await cache.set("a", "1") 28 | await cache.set("b", "2") 29 | await cache.set("c", "3") 30 | await cache.set("d", "4") 31 | 32 | possible_keys = ["a", "b", "c", "d", "e", "f"] 33 | 34 | for t in range(1000): 35 | await cache.get(random.choice(possible_keys)) 36 | 37 | assert cache.hit_miss_ratio["hit_ratio"] > 0.5 38 | assert cache.hit_miss_ratio["total"] == 1000 39 | 40 | assert cache.profiling["get_min"] > 0 41 | assert cache.profiling["set_min"] > 0 42 | assert cache.profiling["get_max"] > 0 43 | assert cache.profiling["set_max"] > 0 44 | 45 | print(cache.hit_miss_ratio) 46 | print(cache.profiling) 47 | 48 | 49 | async def test_run(): 50 | await run() 51 | await cache.delete("a") 52 | await cache.delete("b") 53 | await cache.delete("c") 54 | await cache.delete("d") 55 | 56 | 57 | if __name__ == "__main__": 58 | asyncio.run(test_run()) 59 | -------------------------------------------------------------------------------- /examples/python_object.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from collections import namedtuple 4 | import redis.asyncio as redis 5 | 6 | 7 | from aiocache import RedisCache 8 | from aiocache.serializers import PickleSerializer 9 | 10 | MyObject = namedtuple("MyObject", ["x", "y"]) 11 | cache = RedisCache(serializer=PickleSerializer(), namespace="main", client=redis.Redis()) 12 | 13 | 14 | async def complex_object(): 15 | obj = MyObject(x=1, y=2) 16 | await cache.set("key", obj) 17 | my_object = await cache.get("key") 18 | 19 | assert my_object.x == 1 20 | assert my_object.y == 2 21 | 22 | 23 | async def test_python_object(): 24 | await complex_object() 25 | await cache.delete("key") 26 | await cache.close() 27 | 28 | 29 | if __name__ == "__main__": 30 | asyncio.run(test_python_object()) 31 | -------------------------------------------------------------------------------- /examples/redlock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | import redis.asyncio as redis 5 | 6 | from aiocache import RedisCache 7 | from aiocache.lock import RedLock 8 | 9 | logger = logging.getLogger(__name__) 10 | cache = RedisCache(namespace="main", client=redis.Redis()) 11 | 12 | 13 | async def expensive_function(): 14 | logger.warning('Expensive 
is being executed...') 15 | await asyncio.sleep(1) 16 | return 'result' 17 | 18 | 19 | async def my_view(): 20 | 21 | async with RedLock(cache, 'key', lease=2): # Wait at most 2 seconds 22 | result = await cache.get('key') 23 | if result is not None: 24 | logger.info('Found the value in the cache hurray!') 25 | return result 26 | 27 | result = await expensive_function() 28 | await cache.set('key', result) 29 | return result 30 | 31 | 32 | async def concurrent(): 33 | await asyncio.gather(my_view(), my_view(), my_view()) 34 | 35 | 36 | async def test_redis(): 37 | await concurrent() 38 | await cache.delete("key") 39 | await cache.close() 40 | 41 | 42 | if __name__ == '__main__': 43 | asyncio.run(test_redis()) 44 | -------------------------------------------------------------------------------- /examples/run_all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | pushd "$(dirname "$0")" 4 | 5 | for f in `find . -name '*.py' -not -path "./frameworks/*"`; do 6 | echo "########## Running $f #########" 7 | python $f || exit 1 8 | echo;echo;echo 9 | done 10 | 11 | popd 12 | -------------------------------------------------------------------------------- /examples/serializer_class.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import zlib 3 | 4 | import redis.asyncio as redis 5 | 6 | from aiocache import RedisCache 7 | from aiocache.serializers import BaseSerializer 8 | 9 | 10 | class CompressionSerializer(BaseSerializer): 11 | 12 | # This is needed because zlib works with bytes. 13 | # this way the underlying backend knows how to 14 | # store/retrieve values 15 | DEFAULT_ENCODING = None 16 | 17 | def dumps(self, value): 18 | print("I've received:\n{}".format(value)) 19 | compressed = zlib.compress(value.encode()) 20 | print("But I'm storing:\n{}".format(compressed)) 21 | return compressed 22 | 23 | def loads(self, value): 24 | print("I've retrieved:\n{}".format(value)) 25 | decompressed = zlib.decompress(value).decode() 26 | print("But I'm returning:\n{}".format(decompressed)) 27 | return decompressed 28 | 29 | 30 | cache = RedisCache(serializer=CompressionSerializer(), namespace="main", client=redis.Redis()) 31 | 32 | 33 | async def serializer(): 34 | text = ( 35 | "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt" 36 | "ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation" 37 | "ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in" 38 | "reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur" 39 | "sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit" 40 | "anim id est laborum.") 41 | await cache.set("key", text) 42 | print("-----------------------------------") 43 | real_value = await cache.get("key") 44 | compressed_value = await cache.raw("get", "main:key") 45 | assert len(compressed_value) < len(real_value.encode()) 46 | 47 | 48 | async def test_serializer(): 49 | await serializer() 50 | await cache.delete("key") 51 | await cache.close() 52 | 53 | 54 | if __name__ == "__main__": 55 | asyncio.run(test_serializer()) 56 | -------------------------------------------------------------------------------- /examples/serializer_function.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | 4 | import redis.asyncio as redis 5 | 6 | from marshmallow import Schema, fields, post_load 7 | 8 | from aiocache import RedisCache 9 | 10 | 11 | class MyType: 12 | def __init__(self, x, y): 13 | self.x = x 14 | self.y = y 15 | 16 | 17 | class MyTypeSchema(Schema): 18 | x = fields.Number() 19 | y = fields.Number() 20 | 21 | @post_load 22 | def build_object(self, data, **kwargs): 23 | return MyType(data['x'], data['y']) 24 | 25 | 26 | def dumps(value): 27 | return MyTypeSchema().dumps(value) 28 | 29 | 30 | def loads(value): 31 | return MyTypeSchema().loads(value) 32 | 33 | 34 | cache = RedisCache(namespace="main", client=redis.Redis()) 35 | 36 | 37 | async def serializer_function(): 38 | await cache.set("key", MyType(1, 2), dumps_fn=dumps) 39 | 40 | obj = await cache.get("key", loads_fn=loads) 41 | 42 | assert obj.x == 1 43 | assert obj.y == 2 44 | assert await cache.get("key") == json.loads(('{"y": 2.0, "x": 1.0}')) 45 | assert json.loads(await cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0} 46 | 47 | 48 | async def test_serializer_function(): 49 | await serializer_function() 50 | await cache.delete("key") 51 | await cache.close() 52 | 53 | 54 | if __name__ == "__main__": 55 | asyncio.run(test_serializer_function()) 56 | -------------------------------------------------------------------------------- /examples/simple_redis.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | 4 | import redis.asyncio as redis 5 | 6 | from aiocache import RedisCache 7 | 8 | cache = RedisCache(namespace="main", client=redis.Redis()) 9 | 10 | 11 | async def redis(): 12 | await cache.set("key", "value") 13 | await cache.set("expire_me", "value", ttl=10) 14 | 15 | assert await cache.get("key") == "value" 16 | assert await cache.get("expire_me") == "value" 17 | assert await cache.raw("ttl", "main:expire_me") > 0 18 | 19 | 20 | async def test_redis(): 21 | await redis() 22 | await cache.delete("key") 23 | await cache.delete("expire_me") 24 | await cache.close() 25 | 26 | 27 | if __name__ == "__main__": 28 | asyncio.run(test_redis()) 29 | -------------------------------------------------------------------------------- /examples/testing.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from unittest.mock import MagicMock 3 | 4 | from aiocache.base import BaseCache 5 | 6 | 7 | async def main(): 8 | mocked_cache = MagicMock(spec=BaseCache) 9 | mocked_cache.get.return_value = "world" 10 | print(await mocked_cache.get("hello")) 11 | 12 | 13 | if __name__ == "__main__": 14 | asyncio.run(main()) 15 | -------------------------------------------------------------------------------- /pyproject.toml: 
-------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 99 3 | target-version = ['py38', 'py39', 'py310', 'py311'] 4 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | 3 | flake8==7.2.0 4 | flake8-bandit==4.1.1 5 | flake8-bugbear==24.12.12 6 | flake8-import-order==0.18.2 7 | flake8-requirements==2.2.1 8 | mypy==1.16.0; implementation_name=="cpython" 9 | types-redis==4.6.0.20241004 10 | types-ujson==5.10.0.20250326 11 | aiocache-dynamodb==1.0.2 # used for documentation 12 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | -e . 2 | 3 | aiomcache==0.8.2 4 | aiohttp==3.12.9 5 | marshmallow==3.26.1 6 | msgpack==1.1.0 7 | pytest==8.4.0 8 | pytest-asyncio==0.26.0 9 | pytest-cov==6.1.1 10 | pytest-mock==3.14.1 11 | redis==5.2.1 12 | -------------------------------------------------------------------------------- /scripts/make_release: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SCRIPT=$(readlink -f "$0") 4 | pushd $(dirname "$SCRIPT") 5 | cd .. 6 | 7 | version=$(grep -o -E "([0-9]+\.[0-9]+\.[0-9]+)" aiocache/_version.py) 8 | echo -n "New version number (current is $version): " 9 | read new_version 10 | gitchangelog ^$version HEAD | sed "s/Unreleased/$new_version (`date +%Y-%m-%d`)/g" > _release_notes 11 | cat _release_notes 12 | echo -n "Are you happy with the release notes (if not, modify the ./_release_notes file manually)? (y/n) " 13 | read answer 14 | 15 | if echo "$answer" | grep -iq "^y" ;then 16 | echo "Generating new release..." 
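# Bump the version string in aiocache/_version.py, rebuild CHANGELOG.md (new notes on top, previous entries appended below), then commit, tag and push the release.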
17 | sed -i "s/$version/$new_version/" aiocache/_version.py 18 | sed -i '1s/^/# Changelog\n\n\n/' _release_notes && sed -i '1,3d' CHANGELOG.md && cat CHANGELOG.md >> _release_notes && mv _release_notes CHANGELOG.md 19 | git add CHANGELOG.md aiocache/_version.py 20 | git commit -m "Bump version $new_version" 21 | git tag -a "$new_version" -m "$new_version" 22 | git push --follow-tags 23 | 24 | else 25 | exit 1 26 | fi 27 | 28 | popd 29 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 3 | 4 | [pep8] 5 | max-line-length=100 6 | 7 | [tool:pytest] 8 | addopts = --cov=aiocache --cov=tests/ --cov-report term --strict-markers 9 | asyncio_mode = auto 10 | junit_suite_name = aiohttp_test_suite 11 | filterwarnings= 12 | error 13 | # Can be removed once using aiojobs or similar in decorator() 14 | ignore:never awaited 15 | testpaths = tests/ 16 | junit_family=xunit2 17 | xfail_strict = true 18 | markers = 19 | memcached: tests requiring memcached backend 20 | redis: tests requiring redis backend 21 | 22 | [coverage:run] 23 | branch = True 24 | parallel = True 25 | source = aiocache 26 | 27 | [coverage:report] 28 | show_missing = true 29 | skip_covered = true 30 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | from pathlib import Path 3 | 4 | from setuptools import setup 5 | 6 | p = Path(__file__).with_name("aiocache") / "__init__.py" 7 | try: 8 | version = re.findall(r"^__version__ = \"([^']+)\"\r?$", p.read_text(), re.M)[0] 9 | except IndexError: 10 | raise RuntimeError("Unable to determine version.") 11 | 12 | readme = Path(__file__).with_name("README.rst").read_text() 13 | 14 | 15 | setup( 16 | name="aiocache", 17 | version=version, 18 | author="Manuel Miranda", 19 | url="https://github.com/aio-libs/aiocache", 20 | author_email="manu.mirandad@gmail.com", 21 | license="BSD-3-Clause", 22 | description="multi backend asyncio cache", 23 | long_description=readme, 24 | classifiers=[ 25 | "Programming Language :: Python", 26 | "Programming Language :: Python :: 3.9", 27 | "Programming Language :: Python :: 3.10", 28 | "Programming Language :: Python :: 3.11", 29 | "Programming Language :: Python :: 3.12", 30 | "Programming Language :: Python :: 3.13", 31 | "Framework :: AsyncIO", 32 | ], 33 | python_requires=">=3.9", 34 | packages=("aiocache",), 35 | install_requires=None, 36 | extras_require={ 37 | "redis": ["redis>=5"], 38 | "memcached": ["aiomcache>=0.5.2"], 39 | "msgpack": ["msgpack>=0.5.5"], 40 | }, 41 | include_package_data=True, 42 | ) 43 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/tests/__init__.py -------------------------------------------------------------------------------- /tests/acceptance/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/tests/acceptance/__init__.py -------------------------------------------------------------------------------- /tests/acceptance/conftest.py: 
-------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from ..utils import KEY_LOCK, Keys 6 | 7 | 8 | @pytest.fixture 9 | async def redis_cache(redis_client): 10 | from aiocache.backends.redis import RedisCache 11 | async with RedisCache(namespace="test", client=redis_client) as cache: 12 | yield cache 13 | await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) 14 | 15 | 16 | @pytest.fixture 17 | async def memory_cache(): 18 | from aiocache.backends.memory import SimpleMemoryCache 19 | async with SimpleMemoryCache(namespace="test") as cache: 20 | yield cache 21 | await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) 22 | 23 | 24 | @pytest.fixture 25 | async def memcached_cache(): 26 | from aiocache.backends.memcached import MemcachedCache 27 | async with MemcachedCache(namespace="test") as cache: 28 | yield cache 29 | await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) 30 | 31 | 32 | @pytest.fixture( 33 | params=( 34 | pytest.param("redis_cache", marks=pytest.mark.redis), 35 | "memory_cache", 36 | pytest.param("memcached_cache", marks=pytest.mark.memcached), 37 | )) 38 | def cache(request): 39 | return request.getfixturevalue(request.param) 40 | -------------------------------------------------------------------------------- /tests/acceptance/test_base.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from aiocache.backends.memory import SimpleMemoryCache 6 | from aiocache.base import _Conn 7 | from ..utils import Keys 8 | 9 | 10 | class TestCache: 11 | """ 12 | This class ensures that all caches behave the same way and have the minimum functionality. 13 | To add a new cache, just create the fixture for the new cache and add its id as a param to the 14 | cache fixture. 15 | """ 16 | 17 | async def test_setup(self, cache): 18 | assert cache.namespace == "test" 19 | 20 | async def test_get_missing(self, cache): 21 | assert await cache.get(Keys.KEY) is None 22 | assert await cache.get(Keys.KEY, default=1) == 1 23 | 24 | async def test_get_existing(self, cache): 25 | await cache.set(Keys.KEY, "value") 26 | assert await cache.get(Keys.KEY) == "value" 27 | 28 | async def test_multi_get(self, cache): 29 | await cache.set(Keys.KEY, "value") 30 | assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == ["value", None] 31 | 32 | async def test_delete_missing(self, cache): 33 | result = await cache.delete(Keys.KEY) 34 | assert result == 0 35 | 36 | async def test_delete_existing(self, cache): 37 | await cache.set(Keys.KEY, "value") 38 | result = await cache.delete(Keys.KEY) 39 | assert result == 1 40 | 41 | value = await cache.get(Keys.KEY) 42 | assert value is None 43 | 44 | async def test_set(self, cache): 45 | assert await cache.set(Keys.KEY, "value") is True 46 | 47 | async def test_set_cancel_previous_ttl_handle(self, cache): 48 | await cache.set(Keys.KEY, "value", ttl=4) 49 | 50 | await asyncio.sleep(2.1) 51 | # Smaller ttl seems flaky, as if this call takes >0.5s... 
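# The second set() below must cancel the first TTL handle; otherwise the original 4s timer would expire "new_value" roughly 1.9s after it is stored, i.e. before the final get().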
52 | result = await cache.get(Keys.KEY) 53 | assert result == "value" 54 | await cache.set(Keys.KEY, "new_value", ttl=4) 55 | 56 | await asyncio.sleep(2) 57 | result = await cache.get(Keys.KEY) 58 | assert result == "new_value" 59 | 60 | async def test_multi_set(self, cache): 61 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 62 | assert await cache.multi_set(pairs) is True 63 | assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == ["value", "random_value"] 64 | 65 | async def test_multi_set_with_ttl(self, cache): 66 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 67 | assert await cache.multi_set(pairs, ttl=1) is True 68 | await asyncio.sleep(1.1) 69 | 70 | assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] 71 | 72 | async def test_set_with_ttl(self, cache): 73 | await cache.set(Keys.KEY, "value", ttl=1) 74 | await asyncio.sleep(1.1) 75 | 76 | assert await cache.get(Keys.KEY) is None 77 | 78 | async def test_add_missing(self, cache): 79 | assert await cache.add(Keys.KEY, "value", ttl=1) is True 80 | 81 | async def test_add_existing(self, cache): 82 | assert await cache.set(Keys.KEY, "value") is True 83 | with pytest.raises(ValueError): 84 | await cache.add(Keys.KEY, "value") 85 | 86 | async def test_exists_missing(self, cache): 87 | assert await cache.exists(Keys.KEY) is False 88 | 89 | async def test_exists_existing(self, cache): 90 | await cache.set(Keys.KEY, "value") 91 | assert await cache.exists(Keys.KEY) is True 92 | 93 | async def test_increment_missing(self, cache): 94 | assert await cache.increment(Keys.KEY, delta=2) == 2 95 | assert await cache.increment(Keys.KEY_1, delta=-2) == -2 96 | 97 | async def test_increment_existing(self, cache): 98 | await cache.set(Keys.KEY, 2) 99 | assert await cache.increment(Keys.KEY, delta=2) == 4 100 | assert await cache.increment(Keys.KEY, delta=1) == 5 101 | assert await cache.increment(Keys.KEY, delta=-3) == 2 102 | 103 | async def test_increment_typeerror(self, cache): 104 | await cache.set(Keys.KEY, "value") 105 | with pytest.raises(TypeError): 106 | assert await cache.increment(Keys.KEY) 107 | 108 | async def test_expire_existing(self, cache): 109 | await cache.set(Keys.KEY, "value") 110 | assert await cache.expire(Keys.KEY, 1) is True 111 | await asyncio.sleep(1.1) 112 | assert await cache.exists(Keys.KEY) is False 113 | 114 | async def test_expire_with_0(self, cache): 115 | await cache.set(Keys.KEY, "value", 1) 116 | assert await cache.expire(Keys.KEY, 0) is True 117 | await asyncio.sleep(1.1) 118 | assert await cache.exists(Keys.KEY) is True 119 | 120 | async def test_expire_missing(self, cache): 121 | assert await cache.expire(Keys.KEY, 1) is False 122 | 123 | async def test_clear(self, cache): 124 | await cache.set(Keys.KEY, "value") 125 | await cache.clear() 126 | 127 | assert await cache.exists(Keys.KEY) is False 128 | 129 | async def test_close_pool_only_clears_resources(self, cache): 130 | await cache.set(Keys.KEY, "value") 131 | await cache.close() 132 | assert await cache.set(Keys.KEY, "value") is True 133 | assert await cache.get(Keys.KEY) == "value" 134 | 135 | async def test_single_connection(self, cache): 136 | async with cache.get_connection() as conn: 137 | assert isinstance(conn, _Conn) 138 | assert await conn.set(Keys.KEY, "value") is True 139 | assert await conn.get(Keys.KEY) == "value" 140 | 141 | 142 | class TestMemoryCache: 143 | async def test_accept_explicit_args(self): 144 | with pytest.raises(TypeError): 145 | SimpleMemoryCache(random_attr="wtf") 146 | 147 | async 
def test_set_float_ttl(self, memory_cache): 148 | await memory_cache.set(Keys.KEY, "value", ttl=0.1) 149 | await asyncio.sleep(0.15) 150 | 151 | assert await memory_cache.get(Keys.KEY) is None 152 | 153 | async def test_multi_set_float_ttl(self, memory_cache): 154 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 155 | assert await memory_cache.multi_set(pairs, ttl=0.1) is True 156 | await asyncio.sleep(0.15) 157 | 158 | assert await memory_cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] 159 | 160 | async def test_raw(self, memory_cache): 161 | await memory_cache.raw("setdefault", "key", "value") 162 | assert await memory_cache.raw("get", "key") == "value" 163 | assert list(await memory_cache.raw("keys")) == ["key"] 164 | 165 | async def test_clear_with_namespace_memory(self, memory_cache): 166 | await memory_cache.set(Keys.KEY, "value", namespace="test") 167 | await memory_cache.clear(namespace="test") 168 | 169 | assert await memory_cache.exists(Keys.KEY, namespace="test") is False 170 | 171 | 172 | @pytest.mark.memcached 173 | class TestMemcachedCache: 174 | async def test_accept_explicit_args(self): 175 | from aiocache.backends.memcached import MemcachedCache 176 | 177 | with pytest.raises(TypeError): 178 | MemcachedCache(random_attr="wtf") 179 | 180 | async def test_set_too_long_key(self, memcached_cache): 181 | with pytest.raises(TypeError) as exc_info: 182 | await memcached_cache.set("a" * 2000, "value") 183 | assert str(exc_info.value).startswith("aiomcache error: invalid key") 184 | 185 | async def test_set_float_ttl_fails(self, memcached_cache): 186 | with pytest.raises(TypeError) as exc_info: 187 | await memcached_cache.set(Keys.KEY, "value", ttl=0.1) 188 | assert str(exc_info.value) == "aiomcache error: exptime not int: 0.1" 189 | 190 | async def test_multi_set_float_ttl(self, memcached_cache): 191 | with pytest.raises(TypeError) as exc_info: 192 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] 193 | assert await memcached_cache.multi_set(pairs, ttl=0.1) is True 194 | assert str(exc_info.value) == "aiomcache error: exptime not int: 0.1" 195 | 196 | async def test_raw(self, memcached_cache): 197 | await memcached_cache.raw("set", b"key", b"value") 198 | assert await memcached_cache.raw("get", b"key") == "value" 199 | assert await memcached_cache.raw("prepend", b"key", b"super") is True 200 | assert await memcached_cache.raw("get", b"key") == "supervalue" 201 | 202 | async def test_clear_with_namespace_memcached(self, memcached_cache): 203 | await memcached_cache.set(Keys.KEY, "value", namespace="test") 204 | 205 | with pytest.raises(ValueError): 206 | await memcached_cache.clear(namespace="test") 207 | 208 | assert await memcached_cache.exists(Keys.KEY, namespace="test") is True 209 | 210 | async def test_close(self, memcached_cache): 211 | await memcached_cache.set(Keys.KEY, "value") 212 | await memcached_cache._close() 213 | assert memcached_cache.client._pool._pool.qsize() == 0 214 | 215 | 216 | @pytest.mark.redis 217 | class TestRedisCache: 218 | async def test_accept_explicit_args(self): 219 | from aiocache.backends.redis import RedisCache 220 | 221 | with pytest.raises(TypeError): 222 | RedisCache(random_attr="wtf") 223 | 224 | async def test_float_ttl(self, redis_cache): 225 | await redis_cache.set(Keys.KEY, "value", ttl=0.1) 226 | await asyncio.sleep(0.15) 227 | 228 | assert await redis_cache.get(Keys.KEY) is None 229 | 230 | async def test_multi_set_float_ttl(self, redis_cache): 231 | pairs = [(Keys.KEY, "value"), [Keys.KEY_1, 
"random_value"]] 232 | assert await redis_cache.multi_set(pairs, ttl=0.1) is True 233 | await asyncio.sleep(0.15) 234 | 235 | assert await redis_cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] 236 | 237 | async def test_raw(self, redis_cache): 238 | await redis_cache.raw("set", "key", "value") 239 | assert await redis_cache.raw("get", "key") == "value" 240 | assert await redis_cache.raw("keys", "k*") == ["key"] 241 | # .raw() doesn't build key with namespace prefix, clear it manually 242 | await redis_cache.raw("delete", "key") 243 | 244 | async def test_clear_with_namespace_redis(self, redis_cache): 245 | await redis_cache.set(Keys.KEY, "value", namespace="test") 246 | await redis_cache.clear(namespace="test") 247 | 248 | assert await redis_cache.exists(Keys.KEY, namespace="test") is False 249 | 250 | async def test_close(self, redis_cache): 251 | await redis_cache.set(Keys.KEY, "value") 252 | await redis_cache._close() 253 | -------------------------------------------------------------------------------- /tests/acceptance/test_decorators.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | from unittest import mock 4 | 5 | import pytest 6 | 7 | from aiocache import cached, cached_stampede, multi_cached 8 | from ..utils import Keys, ensure_key 9 | 10 | 11 | async def return_dict(keys=None): 12 | ret = {} 13 | for value, key in enumerate(keys or [Keys.KEY, Keys.KEY_1]): 14 | ret[key] = str(value) 15 | return ret 16 | 17 | 18 | async def stub(arg: float, seconds: int = 0) -> str: 19 | await asyncio.sleep(seconds) 20 | return str(random.randint(1, 50)) 21 | 22 | 23 | class TestCached: 24 | async def test_cached_ttl(self, cache): 25 | @cached(cache=cache, ttl=2, key_builder=lambda *args, **kw: Keys.KEY) 26 | async def fn(): 27 | return str(random.randint(1, 50)) 28 | 29 | resp1 = await fn() 30 | resp2 = await fn() 31 | 32 | assert await cache.get(Keys.KEY) == resp1 == resp2 33 | await asyncio.sleep(2.1) 34 | assert await cache.get(Keys.KEY) is None 35 | 36 | async def test_cached_key_builder(self, cache): 37 | def build_key(f, self, a, b): 38 | return "{}_{}_{}_{}".format(self, f.__name__, a, b) 39 | 40 | @cached(cache=cache, key_builder=build_key) 41 | async def fn(self, a, b=2): 42 | return "1" 43 | 44 | await fn("self", 1, 3) 45 | assert await cache.exists(build_key(fn, "self", 1, 3)) is True 46 | 47 | @pytest.mark.parametrize("decorator", (cached, cached_stampede)) 48 | async def test_cached_skip_cache_func(self, cache, decorator): 49 | @decorator(cache=cache, skip_cache_func=lambda r: r is None) 50 | async def sk_func(x): 51 | return x if x > 0 else None 52 | 53 | arg = 1 54 | res = await sk_func(arg) 55 | assert res 56 | 57 | key = decorator(cache=cache).get_cache_key(sk_func, args=(1,), kwargs={}) 58 | 59 | assert key 60 | assert await cache.exists(key) 61 | assert await cache.get(key) == res 62 | 63 | arg = -1 64 | 65 | await sk_func(arg) 66 | 67 | key = decorator(cache=cache).get_cache_key(sk_func, args=(-1,), kwargs={}) 68 | 69 | assert key 70 | assert not await cache.exists(key) 71 | 72 | async def test_cached_without_namespace(self, cache): 73 | """Default cache key is created when no namespace is provided""" 74 | cache.namespace = None 75 | 76 | @cached(cache=cache) 77 | async def fn(): 78 | return "1" 79 | 80 | await fn() 81 | decorator = cached(cache=cache) 82 | key = decorator.get_cache_key(fn, args=(), kwargs={}) 83 | assert await cache.exists(key, namespace=None) is True 84 | 85 | async def 
test_cached_with_namespace(self, cache): 86 | """Cache key is prefixed with provided namespace""" 87 | key_prefix = "test" 88 | cache.namespace = key_prefix 89 | 90 | @cached(cache=cache) 91 | async def ns_fn(): 92 | return "1" 93 | 94 | await ns_fn() 95 | decorator = cached(cache=cache) 96 | key = decorator.get_cache_key(ns_fn, args=(), kwargs={}) 97 | assert await cache.exists(key, namespace=key_prefix) is True 98 | 99 | 100 | class TestCachedStampede: 101 | 102 | async def test_cached_stampede(self, mocker, cache): 103 | mocker.spy(cache, "get") 104 | mocker.spy(cache, "set") 105 | decorator = cached_stampede(cache=cache, ttl=10, lease=3) 106 | 107 | await asyncio.gather(decorator(stub)(0.5), decorator(stub)(0.5)) 108 | 109 | cache.get.assert_called_with("tests.acceptance.test_decoratorsstub(0.5,)[]") 110 | assert cache.get.call_count == 4 111 | cache.set.assert_called_with("tests.acceptance.test_decoratorsstub(0.5,)[]", 112 | mock.ANY, ttl=10) 113 | assert cache.set.call_count == 1, cache.set.call_args_list 114 | 115 | async def test_locking_dogpile_lease_expiration(self, mocker, cache): 116 | mocker.spy(cache, "get") 117 | mocker.spy(cache, "set") 118 | decorator = cached_stampede(cache=cache, ttl=10, lease=3) 119 | 120 | await asyncio.gather( 121 | decorator(stub)(1, seconds=1), 122 | decorator(stub)(1, seconds=2), 123 | decorator(stub)(1, seconds=3), 124 | ) 125 | 126 | assert cache.get.call_count == 6 127 | assert cache.set.call_count == 3 128 | 129 | async def test_locking_dogpile_task_cancellation(self, cache): 130 | @cached_stampede(cache=cache) 131 | async def cancel_task(): 132 | raise asyncio.CancelledError() 133 | 134 | with pytest.raises(asyncio.CancelledError): 135 | await cancel_task() 136 | 137 | 138 | class TestMultiCachedDecorator: 139 | async def test_multi_cached(self, cache): 140 | multi_cached_decorator = multi_cached(cache, keys_from_attr="keys") 141 | 142 | default_keys = {Keys.KEY, Keys.KEY_1} 143 | await multi_cached_decorator(return_dict)(keys=default_keys) 144 | 145 | for key in default_keys: 146 | assert await cache.get(key) is not None 147 | 148 | async def test_keys_without_kwarg(self, cache): 149 | @multi_cached(cache, keys_from_attr="keys") 150 | async def fn(keys): 151 | return {Keys.KEY: 1} 152 | 153 | await fn([Keys.KEY]) 154 | assert await cache.exists(Keys.KEY) is True 155 | 156 | async def test_multi_cached_key_builder(self, cache): 157 | def build_key(key, f, self, keys, market="ES"): 158 | return "{}_{}_{}".format(f.__name__, ensure_key(key), market) 159 | 160 | @multi_cached(keys_from_attr="keys", key_builder=build_key, cache=cache) 161 | async def fn(self, keys, market="ES"): 162 | return {Keys.KEY: 1, Keys.KEY_1: 2} 163 | 164 | await fn("self", keys=[Keys.KEY, Keys.KEY_1]) 165 | assert await cache.exists("fn_" + ensure_key(Keys.KEY) + "_ES") is True 166 | assert await cache.exists("fn_" + ensure_key(Keys.KEY_1) + "_ES") is True 167 | 168 | async def test_multi_cached_skip_keys(self, cache): 169 | @multi_cached(cache, keys_from_attr="keys", skip_cache_func=lambda _, v: v is None) 170 | async def multi_sk_fn(keys, values): 171 | return {k: v for k, v in zip(keys, values)} 172 | 173 | res = await multi_sk_fn(keys=[Keys.KEY, Keys.KEY_1], values=[42, None]) 174 | assert res 175 | assert Keys.KEY in res and Keys.KEY_1 in res 176 | 177 | assert await cache.exists(Keys.KEY) 178 | assert await cache.get(Keys.KEY) == res[Keys.KEY] 179 | assert not await cache.exists(Keys.KEY_1) 180 | 181 | async def test_fn_with_args(self, cache): 182 | 
@multi_cached(cache, keys_from_attr="keys") 183 | async def fn(keys, *args): 184 | assert len(args) == 1 185 | return {Keys.KEY: 1} 186 | 187 | await fn([Keys.KEY], "arg") 188 | assert await cache.exists(Keys.KEY) is True 189 | 190 | async def test_double_decorator(self, cache): 191 | def dummy_d(fn): 192 | async def wrapper(*args, **kwargs): 193 | await fn(*args, **kwargs) 194 | 195 | return wrapper 196 | 197 | @dummy_d 198 | @multi_cached(cache, keys_from_attr="keys") 199 | async def fn(keys): 200 | return {Keys.KEY: 1} 201 | 202 | await fn([Keys.KEY]) 203 | assert await cache.exists(Keys.KEY) is True 204 | -------------------------------------------------------------------------------- /tests/acceptance/test_lock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from aiocache.lock import OptimisticLock, OptimisticLockError, RedLock 6 | from aiocache.serializers import StringSerializer 7 | from ..utils import KEY_LOCK, Keys 8 | 9 | 10 | @pytest.fixture 11 | def lock(cache): 12 | return RedLock(cache, Keys.KEY, 20) 13 | 14 | 15 | def build_key(key, namespace=None): 16 | return "custom_key" 17 | 18 | 19 | def build_key_bytes(key, namespace=None): 20 | return b"custom_key" 21 | 22 | 23 | @pytest.fixture 24 | def custom_redis_cache(mocker, redis_cache, build_key=build_key): 25 | mocker.patch.object(redis_cache, "build_key", new=build_key) 26 | yield redis_cache 27 | 28 | 29 | @pytest.fixture 30 | def custom_memory_cache(mocker, memory_cache, build_key=build_key): 31 | mocker.patch.object(memory_cache, "build_key", new=build_key) 32 | yield memory_cache 33 | 34 | 35 | @pytest.fixture 36 | def custom_memcached_cache(mocker, memcached_cache, build_key=build_key_bytes): 37 | mocker.patch.object(memcached_cache, "build_key", new=build_key) 38 | yield memcached_cache 39 | 40 | 41 | class TestRedLock: 42 | async def test_acquire(self, cache, lock): 43 | cache.serializer = StringSerializer() 44 | async with lock: 45 | assert await cache.get(KEY_LOCK) == lock._value 46 | 47 | async def test_release_does_nothing_when_no_lock(self, lock): 48 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 49 | 50 | async def test_acquire_release(self, cache, lock): 51 | async with lock: 52 | pass 53 | assert await cache.get(KEY_LOCK) is None 54 | 55 | async def test_locking_dogpile(self, mocker, cache): 56 | mocker.spy(cache, "get") 57 | mocker.spy(cache, "set") 58 | mocker.spy(cache, "_add") 59 | 60 | async def dummy(): 61 | res = await cache.get(Keys.KEY) 62 | assert res is None 63 | 64 | async with RedLock(cache, Keys.KEY, lease=5): 65 | res = await cache.get(Keys.KEY) 66 | if res is not None: 67 | return 68 | await asyncio.sleep(0.1) 69 | await cache.set(Keys.KEY, "value") 70 | 71 | await asyncio.gather(dummy(), dummy(), dummy(), dummy()) 72 | assert cache._add.call_count == 4 73 | assert cache.get.call_count == 8 74 | assert cache.set.call_count == 1, cache.set.call_args_list 75 | 76 | async def test_locking_dogpile_lease_expiration(self, cache): 77 | async def dummy() -> None: 78 | res = await cache.get(Keys.KEY) 79 | assert res is None 80 | 81 | # Lease should expire before cache is set, so res is still None. 
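# (the lock's 1s lease runs out before the holder's 1.1s sleep finishes, so waiting tasks are released while the value is still unset)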
82 | async with RedLock(cache, Keys.KEY, lease=1): 83 | res = await cache.get(Keys.KEY) 84 | assert res is None 85 | await asyncio.sleep(1.1) 86 | await cache.set(Keys.KEY, "value") 87 | 88 | await asyncio.gather(dummy(), dummy(), dummy(), dummy()) 89 | 90 | async def test_locking_dogpile_propagates_exceptions(self, cache): 91 | async def dummy(): 92 | async with RedLock(cache, Keys.KEY, lease=1): 93 | raise ValueError() 94 | 95 | with pytest.raises(ValueError): 96 | await dummy() 97 | 98 | 99 | class TestMemoryRedLock: 100 | @pytest.fixture 101 | def lock(self, memory_cache): 102 | return RedLock(memory_cache, Keys.KEY, 20) 103 | 104 | async def test_acquire_key_builder(self, custom_memory_cache, lock): 105 | async with lock: 106 | assert await custom_memory_cache.get(KEY_LOCK) == lock._value 107 | 108 | async def test_acquire_release_key_builder(self, custom_memory_cache, lock): 109 | async with lock: 110 | assert await custom_memory_cache.get(KEY_LOCK) is not None 111 | assert await custom_memory_cache.get(KEY_LOCK) is None 112 | 113 | async def test_release_wrong_token_fails(self, lock): 114 | await lock.__aenter__() 115 | lock._value = "random" 116 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 117 | 118 | async def test_release_wrong_client_fails(self, memory_cache, lock): 119 | wrong_lock = RedLock(memory_cache, Keys.KEY, 20) 120 | await lock.__aenter__() 121 | assert await wrong_lock.__aexit__("exc_type", "exc_value", "traceback") is None 122 | 123 | async def test_float_lease(self, memory_cache): 124 | lock = RedLock(memory_cache, Keys.KEY, 0.1) 125 | await lock.__aenter__() 126 | await asyncio.sleep(0.2) 127 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 128 | 129 | 130 | @pytest.mark.redis 131 | class TestRedisRedLock: 132 | @pytest.fixture 133 | def lock(self, redis_cache): 134 | return RedLock(redis_cache, Keys.KEY, 20) 135 | 136 | async def test_acquire_key_builder(self, custom_redis_cache, lock): 137 | custom_redis_cache.serializer = StringSerializer() 138 | async with lock: 139 | assert await custom_redis_cache.get(KEY_LOCK) == lock._value 140 | 141 | async def test_acquire_release_key_builder(self, custom_redis_cache, lock): 142 | custom_redis_cache.serializer = StringSerializer() 143 | async with lock: 144 | assert await custom_redis_cache.get(KEY_LOCK) is not None 145 | assert await custom_redis_cache.get(KEY_LOCK) is None 146 | 147 | async def test_release_wrong_token_fails(self, lock): 148 | await lock.__aenter__() 149 | lock._value = "random" 150 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 151 | 152 | async def test_release_wrong_client_fails(self, redis_cache, lock): 153 | wrong_lock = RedLock(redis_cache, Keys.KEY, 20) 154 | await lock.__aenter__() 155 | assert await wrong_lock.__aexit__("exc_type", "exc_value", "traceback") is None 156 | 157 | async def test_float_lease(self, redis_cache): 158 | lock = RedLock(redis_cache, Keys.KEY, 0.1) 159 | await lock.__aenter__() 160 | await asyncio.sleep(0.2) 161 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 162 | 163 | 164 | @pytest.mark.memcached 165 | class TestMemcachedRedLock: 166 | @pytest.fixture 167 | def lock(self, memcached_cache): 168 | return RedLock(memcached_cache, Keys.KEY, 20) 169 | 170 | async def test_acquire_key_builder(self, custom_memcached_cache, lock): 171 | custom_memcached_cache.serializer = StringSerializer() 172 | async with lock: 173 | assert await 
custom_memcached_cache.get(KEY_LOCK) == lock._value 174 | 175 | async def test_acquire_release_key_builder(self, custom_memcached_cache, lock): 176 | custom_memcached_cache.serializer = StringSerializer() 177 | async with lock: 178 | assert await custom_memcached_cache.get(KEY_LOCK) is not None 179 | assert await custom_memcached_cache.get(KEY_LOCK) is None 180 | 181 | async def test_release_wrong_token_succeeds_meh(self, lock): 182 | await lock.__aenter__() 183 | lock._value = "random" 184 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 185 | 186 | async def test_release_wrong_client_succeeds_meh(self, memcached_cache, lock): 187 | wrong_lock = RedLock(memcached_cache, Keys.KEY, 20) 188 | await lock.__aenter__() 189 | assert await wrong_lock.__aexit__("exc_type", "exc_value", "traceback") is None 190 | 191 | async def test_float_lease(self, memcached_cache): 192 | lock = RedLock(memcached_cache, Keys.KEY, 0.1) 193 | with pytest.raises(TypeError): 194 | await lock.__aenter__() 195 | 196 | 197 | class TestOptimisticLock: 198 | @pytest.fixture 199 | def lock(self, cache): 200 | return OptimisticLock(cache, Keys.KEY) 201 | 202 | async def test_acquire(self, cache, lock): 203 | await cache.set(Keys.KEY, "value") 204 | async with lock: 205 | assert lock._token == await cache._gets(cache.build_key(Keys.KEY)) 206 | 207 | async def test_release_does_nothing(self, lock): 208 | assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None 209 | 210 | async def test_check_and_set_not_existing_never_fails(self, cache, lock): 211 | async with lock as locked: 212 | await cache.set(Keys.KEY, "conflicting_value") 213 | await locked.cas("value") 214 | 215 | assert await cache.get(Keys.KEY) == "value" 216 | 217 | async def test_check_and_set(self, cache, lock): 218 | await cache.set(Keys.KEY, "previous_value") 219 | async with lock as locked: 220 | await locked.cas("value") 221 | 222 | assert await cache.get(Keys.KEY) == "value" 223 | 224 | async def test_check_and_set_fail(self, cache, lock): 225 | await cache.set(Keys.KEY, "previous_value") 226 | with pytest.raises(OptimisticLockError): 227 | async with lock as locked: 228 | await cache.set(Keys.KEY, "conflicting_value") 229 | await locked.cas("value") 230 | 231 | async def test_check_and_set_with_int_ttl(self, cache, lock): 232 | await cache.set(Keys.KEY, "previous_value") 233 | async with lock as locked: 234 | await locked.cas("value", ttl=1) 235 | 236 | await asyncio.sleep(1) 237 | assert await cache.get(Keys.KEY) is None 238 | 239 | 240 | class TestMemoryOptimisticLock: 241 | @pytest.fixture 242 | def lock(self, memory_cache): 243 | return OptimisticLock(memory_cache, Keys.KEY) 244 | 245 | async def test_acquire_key_builder(self, custom_memory_cache, lock): 246 | await custom_memory_cache.set(Keys.KEY, "value") 247 | async with lock: 248 | assert await custom_memory_cache.get(KEY_LOCK) == lock._token 249 | await custom_memory_cache.delete(Keys.KEY, "value") 250 | 251 | async def test_check_and_set_with_float_ttl(self, memory_cache, lock): 252 | await memory_cache.set(Keys.KEY, "previous_value") 253 | async with lock as locked: 254 | await locked.cas("value", ttl=0.1) 255 | 256 | await asyncio.sleep(1) 257 | assert await memory_cache.get(Keys.KEY) is None 258 | 259 | 260 | @pytest.mark.redis 261 | class TestRedisOptimisticLock: 262 | @pytest.fixture 263 | def lock(self, redis_cache): 264 | return OptimisticLock(redis_cache, Keys.KEY) 265 | 266 | async def test_acquire_key_builder(self, custom_redis_cache, lock): 
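# StringSerializer keeps the stored token as a plain string so it compares equal to lock._token below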
267 | custom_redis_cache.serializer = StringSerializer() 268 | await custom_redis_cache.set(Keys.KEY, "value") 269 | async with lock: 270 | assert await custom_redis_cache.get(KEY_LOCK) == lock._token 271 | await custom_redis_cache.delete(Keys.KEY, "value") 272 | 273 | async def test_check_and_set_with_float_ttl(self, redis_cache, lock): 274 | await redis_cache.set(Keys.KEY, "previous_value") 275 | async with lock as locked: 276 | await locked.cas("value", ttl=0.1) 277 | 278 | await asyncio.sleep(1) 279 | assert await redis_cache.get(Keys.KEY) is None 280 | -------------------------------------------------------------------------------- /tests/acceptance/test_plugins.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from aiocache.plugins import HitMissRatioPlugin, TimingPlugin 4 | 5 | 6 | class TestHitMissRatioPlugin: 7 | @pytest.mark.parametrize( 8 | "data, ratio", 9 | [ 10 | ({"testa": 1, "testb": 2, "testc": 3}, 0.6), 11 | ({"testa": 1, "testz": 0}, 0.2), 12 | ({}, 0), 13 | ({"testa": 1, "testb": 2, "testc": 3, "testd": 4, "teste": 5}, 1), 14 | ], 15 | ) 16 | async def test_get_hit_miss_ratio(self, memory_cache, data, ratio): 17 | keys = ["a", "b", "c", "d", "e", "f"] 18 | memory_cache.plugins = [HitMissRatioPlugin()] 19 | memory_cache._cache = data 20 | 21 | for key in keys: 22 | await memory_cache.get(key) 23 | 24 | hits = [x for x in keys if "test" + x in data] 25 | assert memory_cache.hit_miss_ratio["hits"] == len(hits) 26 | assert ( 27 | memory_cache.hit_miss_ratio["hit_ratio"] 28 | == len(hits) / memory_cache.hit_miss_ratio["total"] 29 | ) 30 | 31 | @pytest.mark.parametrize( 32 | "data, ratio", 33 | [ 34 | ({"testa": 1, "testb": 2, "testc": 3}, 0.6), 35 | ({"testa": 1, "testz": 0}, 0.2), 36 | ({}, 0), 37 | ({"testa": 1, "testb": 2, "testc": 3, "testd": 4, "teste": 5}, 1), 38 | ], 39 | ) 40 | async def test_multi_get_hit_miss_ratio(self, memory_cache, data, ratio): 41 | keys = ["a", "b", "c", "d", "e", "f"] 42 | memory_cache.plugins = [HitMissRatioPlugin()] 43 | memory_cache._cache = data 44 | 45 | for key in keys: 46 | await memory_cache.multi_get([key]) 47 | 48 | hits = [x for x in keys if "test" + x in data] 49 | assert memory_cache.hit_miss_ratio["hits"] == len(hits) 50 | assert ( 51 | memory_cache.hit_miss_ratio["hit_ratio"] 52 | == len(hits) / memory_cache.hit_miss_ratio["total"] 53 | ) 54 | 55 | async def test_set_and_get_using_namespace(self, memory_cache): 56 | memory_cache.plugins = [HitMissRatioPlugin()] 57 | key = "A" 58 | namespace = "test" 59 | value = 1 60 | await memory_cache.set(key, value, namespace=namespace) 61 | result = await memory_cache.get(key, namespace=namespace) 62 | assert result == value 63 | 64 | 65 | class TestTimingPlugin: 66 | @pytest.mark.parametrize( 67 | "data, ratio", 68 | [ 69 | ({"testa": 1, "testb": 2, "testc": 3}, 0.6), 70 | ({"testa": 1, "testz": 0}, 0.2), 71 | ({}, 0), 72 | ({"testa": 1, "testb": 2, "testc": 3, "testd": 4, "teste": 5}, 1), 73 | ], 74 | ) 75 | async def test_get_avg_min_max(self, memory_cache, data, ratio): 76 | keys = ["a", "b", "c", "d", "e", "f"] 77 | memory_cache.plugins = [TimingPlugin()] 78 | memory_cache._cache = data 79 | 80 | for key in keys: 81 | await memory_cache.get(key) 82 | 83 | assert "get_max" in memory_cache.profiling 84 | assert "get_min" in memory_cache.profiling 85 | assert "get_total" in memory_cache.profiling 86 | assert "get_avg" in memory_cache.profiling 87 | -------------------------------------------------------------------------------- 
/tests/acceptance/test_serializers.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | import random 3 | from typing import Any 4 | 5 | import pytest 6 | from marshmallow import Schema, fields, post_load 7 | 8 | try: 9 | import ujson as json # noqa: I900 10 | except ImportError: 11 | import json # type: ignore[no-redef] 12 | 13 | from aiocache.serializers import ( 14 | BaseSerializer, 15 | JsonSerializer, 16 | NullSerializer, 17 | PickleSerializer, 18 | StringSerializer, 19 | ) 20 | from ..utils import Keys 21 | 22 | 23 | class MyType: 24 | MY_CONSTANT = "CONSTANT" 25 | 26 | def __init__(self, r=None): 27 | self.r = r or random.randint(1, 10) 28 | 29 | def __eq__(self, obj): 30 | return self.__dict__ == obj.__dict__ 31 | 32 | 33 | class MySchema(Schema): 34 | r = fields.Integer() 35 | 36 | @post_load 37 | def build_my_type(self, data, **kwargs): 38 | return MyType(**data) 39 | 40 | class Meta: 41 | strict = True 42 | 43 | 44 | class MyTypeSchema(BaseSerializer): 45 | def __init__(self, *args: Any, **kwargs: Any): 46 | super().__init__(*args, **kwargs) 47 | self.schema = MySchema() 48 | 49 | def dumps(self, value: Any) -> str: 50 | return self.schema.dumps(value) 51 | 52 | def loads(self, value: str) -> Any: 53 | return self.schema.loads(value) 54 | 55 | 56 | class TestNullSerializer: 57 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}, MyType()) 58 | 59 | @pytest.mark.parametrize("obj", TYPES) 60 | async def test_set_get_types(self, memory_cache, obj): 61 | memory_cache.serializer = NullSerializer() 62 | assert await memory_cache.set(Keys.KEY, obj) is True 63 | assert await memory_cache.get(Keys.KEY) is obj 64 | 65 | @pytest.mark.parametrize("obj", TYPES) 66 | async def test_add_get_types(self, memory_cache, obj): 67 | memory_cache.serializer = NullSerializer() 68 | assert await memory_cache.add(Keys.KEY, obj) is True 69 | assert await memory_cache.get(Keys.KEY) is obj 70 | 71 | @pytest.mark.parametrize("obj", TYPES) 72 | async def test_multi_set_multi_get_types(self, memory_cache, obj): 73 | memory_cache.serializer = NullSerializer() 74 | assert await memory_cache.multi_set([(Keys.KEY, obj)]) is True 75 | assert (await memory_cache.multi_get([Keys.KEY]))[0] is obj 76 | 77 | 78 | class TestStringSerializer: 79 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}, MyType()) 80 | 81 | @pytest.mark.parametrize("obj", TYPES) 82 | async def test_set_get_types(self, cache, obj): 83 | cache.serializer = StringSerializer() 84 | assert await cache.set(Keys.KEY, obj) is True 85 | assert await cache.get(Keys.KEY) == str(obj) 86 | 87 | @pytest.mark.parametrize("obj", TYPES) 88 | async def test_add_get_types(self, cache, obj): 89 | cache.serializer = StringSerializer() 90 | assert await cache.add(Keys.KEY, obj) is True 91 | assert await cache.get(Keys.KEY) == str(obj) 92 | 93 | @pytest.mark.parametrize("obj", TYPES) 94 | async def test_multi_set_multi_get_types(self, cache, obj): 95 | cache.serializer = StringSerializer() 96 | assert await cache.multi_set([(Keys.KEY, obj)]) is True 97 | assert await cache.multi_get([Keys.KEY]) == [str(obj)] 98 | 99 | 100 | class TestJsonSerializer: 101 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}) 102 | 103 | @pytest.mark.parametrize("obj", TYPES) 104 | async def test_set_get_types(self, cache, obj): 105 | cache.serializer = JsonSerializer() 106 | assert await cache.set(Keys.KEY, obj) is True 107 | assert await cache.get(Keys.KEY) == json.loads(json.dumps(obj)) 108 | 109 | 
@pytest.mark.parametrize("obj", TYPES) 110 | async def test_add_get_types(self, cache, obj): 111 | cache.serializer = JsonSerializer() 112 | assert await cache.add(Keys.KEY, obj) is True 113 | assert await cache.get(Keys.KEY) == json.loads(json.dumps(obj)) 114 | 115 | @pytest.mark.parametrize("obj", TYPES) 116 | async def test_multi_set_multi_get_types(self, cache, obj): 117 | cache.serializer = JsonSerializer() 118 | assert await cache.multi_set([(Keys.KEY, obj)]) is True 119 | assert await cache.multi_get([Keys.KEY]) == [json.loads(json.dumps(obj))] 120 | 121 | 122 | class TestPickleSerializer: 123 | TYPES = (1, 2.0, "hi", True, ["1", 1], {"key": "value"}, MyType()) 124 | 125 | @pytest.mark.parametrize("obj", TYPES) 126 | async def test_set_get_types(self, cache, obj): 127 | cache.serializer = PickleSerializer() 128 | assert await cache.set(Keys.KEY, obj) is True 129 | assert await cache.get(Keys.KEY) == pickle.loads(pickle.dumps(obj)) 130 | 131 | @pytest.mark.parametrize("obj", TYPES) 132 | async def test_add_get_types(self, cache, obj): 133 | cache.serializer = PickleSerializer() 134 | assert await cache.add(Keys.KEY, obj) is True 135 | assert await cache.get(Keys.KEY) == pickle.loads(pickle.dumps(obj)) 136 | 137 | @pytest.mark.parametrize("obj", TYPES) 138 | async def test_multi_set_multi_get_types(self, cache, obj): 139 | cache.serializer = PickleSerializer() 140 | assert await cache.multi_set([(Keys.KEY, obj)]) is True 141 | assert await cache.multi_get([Keys.KEY]) == [pickle.loads(pickle.dumps(obj))] 142 | 143 | 144 | class TestAltSerializers: 145 | async def test_get_set_alt_serializer_functions(self, cache): 146 | cache.serializer = StringSerializer() 147 | await cache.set(Keys.KEY, "value", dumps_fn=lambda _: "v4lu3") 148 | assert await cache.get(Keys.KEY) == "v4lu3" 149 | assert await cache.get(Keys.KEY, loads_fn=lambda _: "value") == "value" 150 | 151 | async def test_get_set_alt_serializer_class(self, cache): 152 | my_serializer = MyTypeSchema() 153 | my_obj = MyType() 154 | cache.serializer = my_serializer 155 | await cache.set(Keys.KEY, my_obj) 156 | assert await cache.get(Keys.KEY) == my_serializer.loads(my_serializer.dumps(my_obj)) 157 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture() 5 | def max_conns(): 6 | return None 7 | 8 | 9 | @pytest.fixture() 10 | def decode_responses(): 11 | return False 12 | 13 | 14 | @pytest.fixture 15 | async def redis_client(max_conns, decode_responses): 16 | import redis.asyncio as redis 17 | 18 | async with redis.Redis( 19 | host="127.0.0.1", 20 | port=6379, 21 | db=0, 22 | password=None, 23 | decode_responses=decode_responses, 24 | socket_connect_timeout=None, 25 | max_connections=max_conns 26 | ) as r: 27 | yield r 28 | -------------------------------------------------------------------------------- /tests/performance/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/tests/performance/__init__.py -------------------------------------------------------------------------------- /tests/performance/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture 5 | async def redis_cache(redis_client): 6 | # redis connection pool raises ConnectionError but 
doesn't wait for conn reuse 7 | # when exceeding max pool size. 8 | from aiocache.backends.redis import RedisCache 9 | async with RedisCache(namespace="test", client=redis_client) as cache: 10 | yield cache 11 | 12 | 13 | @pytest.fixture 14 | async def memcached_cache(): 15 | from aiocache.backends.memcached import MemcachedCache 16 | async with MemcachedCache(namespace="test", pool_size=1) as cache: 17 | yield cache 18 | -------------------------------------------------------------------------------- /tests/performance/server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import uuid 4 | 5 | import redis.asyncio as redis 6 | from aiohttp import web 7 | 8 | logging.getLogger("aiohttp.access").propagate = False 9 | 10 | 11 | class CacheManager: 12 | def __init__(self, backend: str): 13 | if backend == "redis": 14 | from aiocache.backends.redis import RedisCache 15 | cache = RedisCache( 16 | client=redis.Redis( 17 | host="127.0.0.1", 18 | port=6379, 19 | db=0, 20 | password=None, 21 | decode_responses=False, 22 | ) 23 | ) 24 | elif backend == "memcached": 25 | from aiocache.backends.memcached import MemcachedCache 26 | cache = MemcachedCache() 27 | elif backend == "memory": 28 | from aiocache.backends.memory import SimpleMemoryCache 29 | cache = SimpleMemoryCache() 30 | else: 31 | raise ValueError("Invalid backend") 32 | self.cache = cache 33 | 34 | async def get(self, key): 35 | return await self.cache.get(key, timeout=0.1) 36 | 37 | async def set(self, key, value): 38 | return await self.cache.set(key, value, timeout=0.1) 39 | 40 | async def close(self, *_): 41 | await self.cache.close() 42 | 43 | 44 | cache_key = web.AppKey("cache_key", CacheManager) 45 | 46 | 47 | async def handler_get(req: web.Request) -> web.Response: 48 | try: 49 | data = await req.app[cache_key].get("testkey") 50 | if data: 51 | return web.Response(text=data) 52 | except asyncio.TimeoutError: 53 | return web.Response(status=404) 54 | 55 | data = str(uuid.uuid4()) 56 | await req.app[cache_key].set("testkey", data) 57 | return web.Response(text=str(data)) 58 | 59 | 60 | def run_server(backend: str) -> None: 61 | app = web.Application() 62 | app[cache_key] = CacheManager(backend) 63 | app.on_shutdown.append(app[cache_key].close) 64 | app.router.add_route("GET", "/", handler_get) 65 | web.run_app(app) 66 | -------------------------------------------------------------------------------- /tests/performance/test_concurrency.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import re 3 | import subprocess 4 | import time 5 | from multiprocessing import Process 6 | 7 | import pytest 8 | 9 | from .server import run_server 10 | 11 | 12 | # TODO: Fix and readd "memcached" (currently fails >98% of requests) 13 | @pytest.fixture(params=("memory", "redis")) 14 | def server(request): 15 | p = Process(target=run_server, args=(request.param,)) 16 | p.start() 17 | time.sleep(1) 18 | yield 19 | p.terminate() 20 | p.join(timeout=15) 21 | 22 | 23 | @pytest.mark.xfail(reason="currently fails >85% of requests on GitHub runner, " 24 | "requires several re-runs to pass", 25 | strict=False) 26 | @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Not working currently.") 27 | def test_concurrency_error_rates(server): 28 | """Test with Apache benchmark tool.""" 29 | 30 | total_requests = 1500 31 | # On some platforms, it's required to enlarge number of "open file descriptors" 32 | # with 
"ulimit -n number" before doing the benchmark. 33 | cmd = ("ab", "-n", str(total_requests), "-c", "500", "http://127.0.0.1:8080/") 34 | result = subprocess.run(cmd, capture_output=True, check=True, encoding="utf-8") 35 | 36 | m = re.search(r"Failed requests:\s+([0-9]+)", result.stdout) 37 | assert m, "Missing output from ab: " + result.stdout 38 | failed_requests = int(m.group(1)) 39 | 40 | m = re.search(r"Non-2xx responses:\s+([0-9]+)", result.stdout) 41 | non_200 = int(m.group(1)) if m else 0 42 | 43 | assert failed_requests / total_requests < 0.75, result.stdout 44 | assert non_200 / total_requests < 0.75, result.stdout 45 | -------------------------------------------------------------------------------- /tests/performance/test_footprint.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import time 3 | from typing import AsyncIterator, cast 4 | 5 | import aiomcache 6 | import pytest 7 | import redis.asyncio as redis 8 | 9 | 10 | @pytest.fixture 11 | async def redis_client() -> AsyncIterator["redis.Redis[str]"]: 12 | async with cast("redis.Redis[str]", 13 | redis.Redis(host="127.0.0.1", port=6379, max_connections=1)) as r: 14 | yield r 15 | 16 | 17 | @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Too slow") 18 | class TestRedis: 19 | async def test_redis_getsetdel(self, redis_client, redis_cache): 20 | N = 10000 21 | redis_total_time = 0 22 | for _n in range(N): 23 | start = time.time() 24 | await redis_client.set("hi", "value") 25 | await redis_client.get("hi") 26 | await redis_client.delete("hi") 27 | redis_total_time += time.time() - start 28 | 29 | aiocache_total_time = 0 30 | for _n in range(N): 31 | start = time.time() 32 | await redis_cache.set("hi", "value", timeout=0) 33 | await redis_cache.get("hi", timeout=0) 34 | await redis_cache.delete("hi", timeout=0) 35 | aiocache_total_time += time.time() - start 36 | 37 | print( 38 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 39 | aiocache_total_time, redis_total_time, aiocache_total_time / redis_total_time 40 | ) 41 | ) 42 | print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) 43 | print("redis avg call: {:0.5f}s".format(redis_total_time / N)) 44 | assert aiocache_total_time / redis_total_time < 1.35 45 | 46 | async def test_redis_multigetsetdel(self, redis_client, redis_cache): 47 | N = 5000 48 | redis_total_time = 0 49 | values = ["a", "b", "c", "d", "e", "f"] 50 | for _n in range(N): 51 | start = time.time() 52 | await redis_client.mset({x: x for x in values}) 53 | await redis_client.mget(values) 54 | for k in values: 55 | await redis_client.delete(k) 56 | redis_total_time += time.time() - start 57 | 58 | aiocache_total_time = 0 59 | for _n in range(N): 60 | start = time.time() 61 | await redis_cache.multi_set([(x, x) for x in values], timeout=0) 62 | await redis_cache.multi_get(values, timeout=0) 63 | for k in values: 64 | await redis_cache.delete(k, timeout=0) 65 | aiocache_total_time += time.time() - start 66 | 67 | print( 68 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 69 | aiocache_total_time, redis_total_time, aiocache_total_time / redis_total_time 70 | ) 71 | ) 72 | print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) 73 | print("redis_client avg call: {:0.5f}s".format(redis_total_time / N)) 74 | assert aiocache_total_time / redis_total_time < 1.35 75 | 76 | 77 | @pytest.fixture 78 | async def aiomcache_pool(): 79 | client = aiomcache.Client("127.0.0.1", 11211, pool_size=1) 80 | yield client 81 | await client.close() 82 | 
83 | 84 | @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Too slow") 85 | class TestMemcached: 86 | async def test_memcached_getsetdel(self, aiomcache_pool, memcached_cache): 87 | N = 10000 88 | aiomcache_total_time = 0 89 | for _n in range(N): 90 | start = time.time() 91 | await aiomcache_pool.set(b"hi", b"value") 92 | await aiomcache_pool.get(b"hi") 93 | await aiomcache_pool.delete(b"hi") 94 | aiomcache_total_time += time.time() - start 95 | 96 | aiocache_total_time = 0 97 | for _n in range(N): 98 | start = time.time() 99 | await memcached_cache.set("hi", "value", timeout=0) 100 | await memcached_cache.get("hi", timeout=0) 101 | await memcached_cache.delete("hi", timeout=0) 102 | aiocache_total_time += time.time() - start 103 | 104 | print( 105 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 106 | aiocache_total_time, 107 | aiomcache_total_time, 108 | aiocache_total_time / aiomcache_total_time, 109 | ) 110 | ) 111 | print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) 112 | print("aiomcache avg call: {:0.5f}s".format(aiomcache_total_time / N)) 113 | assert aiocache_total_time / aiomcache_total_time < 1.40 114 | 115 | async def test_memcached_multigetsetdel(self, aiomcache_pool, memcached_cache): 116 | N = 2000 117 | aiomcache_total_time = 0 118 | values = [b"a", b"b", b"c", b"d", b"e", b"f"] 119 | for _n in range(N): 120 | start = time.time() 121 | for k in values: 122 | await aiomcache_pool.set(k, k) 123 | await aiomcache_pool.multi_get(*values) 124 | for k in values: 125 | await aiomcache_pool.delete(k) 126 | aiomcache_total_time += time.time() - start 127 | 128 | aiocache_total_time = 0 129 | values = ["a", "b", "c", "d", "e", "f"] 130 | for _n in range(N): 131 | start = time.time() 132 | await memcached_cache.multi_set([(x, x) for x in values], timeout=0) 133 | await memcached_cache.multi_get(values, timeout=0) 134 | for k in values: 135 | await memcached_cache.delete(k, timeout=0) 136 | aiocache_total_time += time.time() - start 137 | 138 | print( 139 | "\n{:0.2f}/{:0.2f}: {:0.2f}".format( 140 | aiocache_total_time, 141 | aiomcache_total_time, 142 | aiocache_total_time / aiomcache_total_time, 143 | ) 144 | ) 145 | print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) 146 | print("aiomcache avg call: {:0.5f}s".format(aiomcache_total_time / N)) 147 | assert aiocache_total_time / aiomcache_total_time < 1.40 148 | -------------------------------------------------------------------------------- /tests/ut/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/tests/ut/__init__.py -------------------------------------------------------------------------------- /tests/ut/backends/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiocache/28b3af84086e53e6a4ee5196770660a8f797fb3d/tests/ut/backends/__init__.py -------------------------------------------------------------------------------- /tests/ut/backends/test_memory.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from unittest.mock import ANY, MagicMock, create_autospec, patch 3 | 4 | import pytest 5 | 6 | from aiocache.backends.memory import SimpleMemoryBackend, SimpleMemoryCache 7 | from aiocache.base import BaseCache 8 | from aiocache.serializers import NullSerializer 9 | from ...utils import Keys 10 | 11 | 12 | 
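# Note: the fixture below swaps the backend's plain-dict storage for an
# autospecced mock, so each test can assert on the exact dict operation the
# backend performed without touching a real store. A minimal standalone
# sketch of the same idea, using only unittest.mock (illustrative, not part
# of this suite):
#
#     from unittest.mock import create_autospec
#
#     storage = create_autospec(dict, instance=True)
#     storage["k"] = "v"  # recorded by the mock, nothing actually stored
#     storage.__setitem__.assert_called_with("k", "v")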
@pytest.fixture 13 | def memory(mocker): 14 | memory = SimpleMemoryBackend() 15 | mocker.patch.object(memory, "_cache", autospec=True) 16 | return memory 17 | 18 | 19 | class TestSimpleMemoryBackend: 20 | async def test_get(self, memory): 21 | await memory._get(Keys.KEY) 22 | memory._cache.get.assert_called_with(Keys.KEY) 23 | 24 | async def test_gets(self, mocker, memory): 25 | mocker.spy(memory, "_get") 26 | await memory._gets(Keys.KEY) 27 | memory._get.assert_called_with(Keys.KEY, encoding="utf-8", _conn=ANY) 28 | 29 | async def test_set(self, memory): 30 | await memory._set(Keys.KEY, "value") 31 | memory._cache.__setitem__.assert_called_with(Keys.KEY, "value") 32 | 33 | async def test_set_no_ttl_no_handle(self, memory): 34 | await memory._set(Keys.KEY, "value", ttl=0) 35 | assert Keys.KEY not in memory._handlers 36 | 37 | await memory._set(Keys.KEY, "value") 38 | assert Keys.KEY not in memory._handlers 39 | 40 | async def test_set_cancel_previous_ttl_handle(self, memory): 41 | with patch("asyncio.get_running_loop", autospec=True): 42 | await memory._set(Keys.KEY, "value", ttl=0.1) 43 | memory._handlers[Keys.KEY].cancel.assert_not_called() 44 | 45 | await memory._set(Keys.KEY, "new_value", ttl=0.1) 46 | memory._handlers[Keys.KEY].cancel.assert_called_once_with() 47 | 48 | async def test_set_ttl_handle(self, memory): 49 | await memory._set(Keys.KEY, "value", ttl=100) 50 | assert Keys.KEY in memory._handlers 51 | assert isinstance(memory._handlers[Keys.KEY], asyncio.Handle) 52 | 53 | async def test_set_cas_token(self, memory): 54 | memory._cache.get.return_value = "old_value" 55 | assert await memory._set(Keys.KEY, "value", _cas_token="old_value") == 1 56 | memory._cache.__setitem__.assert_called_with(Keys.KEY, "value") 57 | 58 | async def test_set_cas_fail(self, memory): 59 | memory._cache.get.return_value = "value" 60 | assert await memory._set(Keys.KEY, "value", _cas_token="old_value") == 0 61 | assert memory._cache.__setitem__.call_count == 0 62 | 63 | async def test_multi_get(self, memory): 64 | await memory._multi_get([Keys.KEY, Keys.KEY_1]) 65 | memory._cache.get.assert_any_call(Keys.KEY) 66 | memory._cache.get.assert_any_call(Keys.KEY_1) 67 | 68 | async def test_multi_set(self, memory): 69 | await memory._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")]) 70 | memory._cache.__setitem__.assert_any_call(Keys.KEY, "value") 71 | memory._cache.__setitem__.assert_any_call(Keys.KEY_1, "random") 72 | 73 | async def test_add(self, memory, mocker): 74 | mocker.spy(memory, "_set") 75 | await memory._add(Keys.KEY, "value") 76 | memory._set.assert_called_with(Keys.KEY, "value", ttl=None) 77 | 78 | async def test_add_existing(self, memory): 79 | memory._cache.__contains__.return_value = True 80 | with pytest.raises(ValueError): 81 | await memory._add(Keys.KEY, "value") 82 | 83 | async def test_exists(self, memory): 84 | await memory._exists(Keys.KEY) 85 | memory._cache.__contains__.assert_called_with(Keys.KEY) 86 | 87 | async def test_increment(self, memory): 88 | await memory._increment(Keys.KEY, 2) 89 | memory._cache.__contains__.assert_called_with(Keys.KEY) 90 | memory._cache.__setitem__.assert_called_with(Keys.KEY, 2) 91 | 92 | async def test_increment_missing(self, memory): 93 | memory._cache.__contains__.return_value = True 94 | memory._cache.__getitem__.return_value = 2 95 | await memory._increment(Keys.KEY, 2) 96 | memory._cache.__getitem__.assert_called_with(Keys.KEY) 97 | memory._cache.__setitem__.assert_called_with(Keys.KEY, 4) 98 | 99 | async def test_increment_typerror(self, memory): 100 |
memory._cache.__contains__.return_value = True 101 | memory._cache.__getitem__.return_value = "asd" 102 | with pytest.raises(TypeError): 103 | await memory._increment(Keys.KEY, 2) 104 | 105 | async def test_expire_no_handle_no_ttl(self, memory): 106 | memory._cache.__contains__.return_value = True 107 | await memory._expire(Keys.KEY, 0) 108 | assert memory._handlers.get(Keys.KEY) is None 109 | 110 | async def test_expire_no_handle_ttl(self, memory): 111 | memory._cache.__contains__.return_value = True 112 | await memory._expire(Keys.KEY, 1) 113 | assert isinstance(memory._handlers.get(Keys.KEY), asyncio.Handle) 114 | 115 | async def test_expire_handle_ttl(self, memory): 116 | fake = create_autospec(asyncio.TimerHandle, instance=True) 117 | memory._handlers[Keys.KEY] = fake 118 | memory._cache.__contains__.return_value = True 119 | await memory._expire(Keys.KEY, 1) 120 | assert fake.cancel.call_count == 1 121 | assert isinstance(memory._handlers.get(Keys.KEY), asyncio.Handle) 122 | 123 | async def test_expire_missing(self, memory): 124 | memory._cache.__contains__.return_value = False 125 | assert await memory._expire(Keys.KEY, 1) is False 126 | 127 | async def test_delete(self, memory): 128 | fake = create_autospec(asyncio.TimerHandle, instance=True) 129 | memory._handlers[Keys.KEY] = fake 130 | await memory._delete(Keys.KEY) 131 | assert fake.cancel.call_count == 1 132 | assert Keys.KEY not in memory._handlers 133 | memory._cache.pop.assert_called_with(Keys.KEY, None) 134 | 135 | async def test_delete_missing(self, memory): 136 | memory._cache.pop.return_value = None 137 | await memory._delete(Keys.KEY) 138 | memory._cache.pop.assert_called_with(Keys.KEY, None) 139 | 140 | async def test_delete_non_truthy(self, memory): 141 | non_truthy = MagicMock(spec_set=("__bool__",)) 142 | non_truthy.__bool__.side_effect = ValueError("Does not implement truthiness") 143 | 144 | with pytest.raises(ValueError): 145 | bool(non_truthy) 146 | 147 | memory._cache.pop.return_value = non_truthy 148 | await memory._delete(Keys.KEY) 149 | 150 | assert non_truthy.__bool__.call_count == 1 151 | memory._cache.pop.assert_called_with(Keys.KEY, None) 152 | 153 | async def test_clear_namespace(self, memory): 154 | memory._cache.__iter__.return_value = iter(["nma", "nmb", "no"]) 155 | await memory._clear("nm") 156 | assert memory._cache.pop.call_count == 2 157 | memory._cache.pop.assert_any_call("nma", None) 158 | memory._cache.pop.assert_any_call("nmb", None) 159 | 160 | async def test_clear_no_namespace(self, memory): 161 | memory._handlers = "asdad" 162 | memory._cache = "asdad" 163 | await memory._clear() 164 | assert memory._handlers == {} 165 | assert memory._cache == {} 166 | 167 | async def test_raw(self, memory): 168 | await memory._raw("get", Keys.KEY) 169 | memory._cache.get.assert_called_with(Keys.KEY) 170 | 171 | await memory._set(Keys.KEY, "value") 172 | memory._cache.__setitem__.assert_called_with(Keys.KEY, "value") 173 | 174 | async def test_redlock_release(self, memory): 175 | memory._cache.get.return_value = "lock" 176 | fake = create_autospec(asyncio.TimerHandle, instance=True) 177 | memory._handlers[Keys.KEY] = fake 178 | assert await memory._redlock_release(Keys.KEY, "lock") == 1 179 | memory._cache.get.assert_called_with(Keys.KEY) 180 | memory._cache.pop.assert_called_with(Keys.KEY, None) 181 | assert fake.cancel.call_count == 1 182 | assert Keys.KEY not in memory._handlers 183 | 184 | async def test_redlock_release_nokey(self, memory): 185 | memory._cache.get.return_value = None 186 | assert await
memory._redlock_release(Keys.KEY, "lock") == 0 187 | memory._cache.get.assert_called_with(Keys.KEY) 188 | assert memory._cache.pop.call_count == 0 189 | 190 | 191 | class TestSimpleMemoryCache: 192 | def test_name(self): 193 | assert SimpleMemoryCache.NAME == "memory" 194 | 195 | def test_inheritance(self): 196 | assert isinstance(SimpleMemoryCache(), BaseCache) 197 | 198 | def test_default_serializer(self): 199 | assert isinstance(SimpleMemoryCache().serializer, NullSerializer) 200 | 201 | def test_parse_uri_path(self): 202 | assert SimpleMemoryCache().parse_uri_path("/1/2/3") == {} 203 | -------------------------------------------------------------------------------- /tests/ut/backends/test_redis.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import ANY, AsyncMock, create_autospec, patch 2 | 3 | import pytest 4 | from redis.asyncio.client import Pipeline 5 | from redis.exceptions import ResponseError 6 | 7 | from aiocache.backends.redis import RedisBackend, RedisCache 8 | from aiocache.base import BaseCache 9 | from aiocache.serializers import JsonSerializer 10 | from ...utils import Keys, ensure_key 11 | 12 | 13 | @pytest.fixture 14 | def redis(redis_client): 15 | redis = RedisBackend(client=redis_client) 16 | with patch.object(redis, "client", autospec=True) as m: 17 | # These methods actually return an awaitable. 18 | for method in ( 19 | "eval", "expire", "get", "psetex", "setex", "execute_command", "exists", 20 | "incrby", "persist", "delete", "keys", "flushdb", 21 | ): 22 | setattr(m, method, AsyncMock(return_value=None, spec_set=())) 23 | m.mget = AsyncMock(return_value=[None], spec_set=()) 24 | m.set = AsyncMock(return_value=True, spec_set=()) 25 | 26 | m.pipeline.return_value = create_autospec(Pipeline, instance=True) 27 | m.pipeline.return_value.__aenter__.return_value = m.pipeline.return_value 28 | yield redis 29 | 30 | 31 | class TestRedisBackend: 32 | 33 | @pytest.mark.parametrize("decode_responses", [True]) 34 | async def test_redis_backend_requires_client_decode_responses(self, redis_client): 35 | with pytest.raises(ValueError) as ve: 36 | RedisBackend(client=redis_client) 37 | 38 | assert str(ve.value) == ( 39 | "redis client must be constructed with decode_responses set to False" 40 | ) 41 | 42 | async def test_get(self, redis): 43 | redis.client.get.return_value = b"value" 44 | assert await redis._get(Keys.KEY) == "value" 45 | redis.client.get.assert_called_with(Keys.KEY) 46 | 47 | async def test_gets(self, mocker, redis): 48 | mocker.spy(redis, "_get") 49 | await redis._gets(Keys.KEY) 50 | redis._get.assert_called_with(Keys.KEY, encoding="utf-8", _conn=ANY) 51 | 52 | async def test_set(self, redis): 53 | await redis._set(Keys.KEY, "value") 54 | redis.client.set.assert_called_with(Keys.KEY, "value") 55 | 56 | await redis._set(Keys.KEY, "value", ttl=1) 57 | redis.client.setex.assert_called_with(Keys.KEY, 1, "value") 58 | 59 | async def test_set_cas_token(self, mocker, redis): 60 | mocker.spy(redis, "_cas") 61 | await redis._set(Keys.KEY, "value", _cas_token="old_value", _conn=redis.client) 62 | redis._cas.assert_called_with( 63 | Keys.KEY, "value", "old_value", ttl=None, _conn=redis.client 64 | ) 65 | 66 | async def test_cas(self, mocker, redis): 67 | mocker.spy(redis, "_raw") 68 | await redis._cas(Keys.KEY, "value", "old_value", ttl=10, _conn=redis.client) 69 | redis._raw.assert_called_with( 70 | "eval", 71 | redis.CAS_SCRIPT, 72 | 1, 73 | *[Keys.KEY, "value", "old_value", "EX", 10], 74 | _conn=redis.client, 
75 | ) 76 | 77 | async def test_cas_float_ttl(self, mocker, redis): 78 | mocker.spy(redis, "_raw") 79 | await redis._cas(Keys.KEY, "value", "old_value", ttl=0.1, _conn=redis.client) 80 | redis._raw.assert_called_with( 81 | "eval", 82 | redis.CAS_SCRIPT, 83 | 1, 84 | *[Keys.KEY, "value", "old_value", "PX", 100], 85 | _conn=redis.client, 86 | ) 87 | 88 | async def test_multi_get(self, redis): 89 | await redis._multi_get([Keys.KEY, Keys.KEY_1]) 90 | redis.client.mget.assert_called_with(Keys.KEY, Keys.KEY_1) 91 | 92 | async def test_multi_set(self, redis): 93 | await redis._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")]) 94 | redis.client.execute_command.assert_called_with( 95 | "MSET", Keys.KEY, "value", Keys.KEY_1, "random" 96 | ) 97 | 98 | async def test_multi_set_with_ttl(self, redis): 99 | await redis._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")], ttl=1) 100 | assert redis.client.pipeline.call_count == 1 101 | pipeline = redis.client.pipeline.return_value 102 | pipeline.execute_command.assert_called_with( 103 | "MSET", Keys.KEY, "value", Keys.KEY_1, "random" 104 | ) 105 | pipeline.expire.assert_any_call(Keys.KEY, time=1) 106 | pipeline.expire.assert_any_call(Keys.KEY_1, time=1) 107 | assert pipeline.execute.call_count == 1 108 | 109 | async def test_add(self, redis): 110 | await redis._add(Keys.KEY, "value") 111 | redis.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=None) 112 | 113 | await redis._add(Keys.KEY, "value", 1) 114 | redis.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=1) 115 | 116 | async def test_add_existing(self, redis): 117 | redis.client.set.return_value = False 118 | with pytest.raises(ValueError): 119 | await redis._add(Keys.KEY, "value") 120 | 121 | async def test_add_float_ttl(self, redis): 122 | await redis._add(Keys.KEY, "value", 0.1) 123 | redis.client.set.assert_called_with(Keys.KEY, "value", nx=True, px=100) 124 | 125 | async def test_exists(self, redis): 126 | redis.client.exists.return_value = 1 127 | await redis._exists(Keys.KEY) 128 | redis.client.exists.assert_called_with(Keys.KEY) 129 | 130 | async def test_increment(self, redis): 131 | await redis._increment(Keys.KEY, delta=2) 132 | redis.client.incrby.assert_called_with(Keys.KEY, 2) 133 | 134 | async def test_increment_typerror(self, redis): 135 | redis.client.incrby.side_effect = ResponseError("msg") 136 | with pytest.raises(TypeError): 137 | await redis._increment(Keys.KEY, delta=2) 138 | redis.client.incrby.assert_called_with(Keys.KEY, 2) 139 | 140 | async def test_expire(self, redis): 141 | await redis._expire(Keys.KEY, 1) 142 | redis.client.expire.assert_called_with(Keys.KEY, 1) 143 | 144 | 145 | async def test_expire_0_ttl(self, redis): 146 | await redis._expire(Keys.KEY, ttl=0) 147 | redis.client.persist.assert_called_with(Keys.KEY) 148 | 149 | async def test_delete(self, redis): 150 | await redis._delete(Keys.KEY) 151 | redis.client.delete.assert_called_with(Keys.KEY) 152 | 153 | async def test_clear(self, redis): 154 | redis.client.keys.return_value = ["nm:a", "nm:b"] 155 | await redis._clear("nm") 156 | redis.client.delete.assert_called_with("nm:a", "nm:b") 157 | 158 | async def test_clear_no_keys(self, redis): 159 | redis.client.keys.return_value = [] 160 | await redis._clear("nm") 161 | redis.client.delete.assert_not_called() 162 | 163 | async def test_clear_no_namespace(self, redis): 164 | await redis._clear() 165 | assert redis.client.flushdb.call_count == 1 166 | 167 | async def test_raw(self, redis): 168 |
await redis._raw("get", Keys.KEY) 169 | await redis._raw("set", Keys.KEY, 1) 170 | redis.client.get.assert_called_with(Keys.KEY) 171 | redis.client.set.assert_called_with(Keys.KEY, 1) 172 | 173 | async def test_redlock_release(self, mocker, redis): 174 | mocker.spy(redis, "_raw") 175 | await redis._redlock_release(Keys.KEY, "random") 176 | redis._raw.assert_called_with("eval", redis.RELEASE_SCRIPT, 1, Keys.KEY, "random") 177 | 178 | 179 | class TestRedisCache: 180 | @pytest.fixture 181 | def set_test_namespace(self, redis_cache): 182 | redis_cache.namespace = "test" 183 | yield 184 | redis_cache.namespace = None 185 | 186 | def test_name(self): 187 | assert RedisCache.NAME == "redis" 188 | 189 | def test_inheritance(self, redis_client): 190 | assert isinstance(RedisCache(client=redis_client), BaseCache) 191 | 192 | def test_default_serializer(self, redis_client): 193 | assert isinstance(RedisCache(client=redis_client).serializer, JsonSerializer) 194 | 195 | @pytest.mark.parametrize( 196 | "path,expected", [("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})] 197 | ) 198 | def test_parse_uri_path(self, path, expected, redis_client): 199 | assert RedisCache(client=redis_client).parse_uri_path(path) == expected 200 | 201 | @pytest.mark.parametrize( 202 | "namespace, expected", 203 | ([None, "test:" + ensure_key(Keys.KEY)], ["", ensure_key(Keys.KEY)], ["my_ns", "my_ns:" + ensure_key(Keys.KEY)]), # noqa: B950 204 | ) 205 | def test_build_key_double_dot(self, set_test_namespace, redis_cache, namespace, expected): 206 | assert redis_cache.build_key(Keys.KEY, namespace) == expected 207 | 208 | def test_build_key_no_namespace(self, redis_cache): 209 | assert redis_cache.build_key(Keys.KEY, namespace=None) == Keys.KEY 210 | -------------------------------------------------------------------------------- /tests/ut/conftest.py: -------------------------------------------------------------------------------- 1 | from contextlib import ExitStack 2 | from unittest.mock import create_autospec, patch 3 | 4 | import pytest 5 | 6 | from aiocache.plugins import BasePlugin 7 | from ..utils import AbstractBaseCache, ConcreteBaseCache 8 | 9 | 10 | @pytest.fixture 11 | def mock_cache(mocker): 12 | return create_autospec(ConcreteBaseCache()) 13 | 14 | 15 | @pytest.fixture 16 | def mock_base_cache(): 17 | """Return BaseCache instance with unimplemented methods mocked out.""" 18 | plugin = create_autospec(BasePlugin, instance=True) 19 | cache = ConcreteBaseCache(timeout=0.002, plugins=(plugin,)) 20 | methods = ("_add", "_get", "_gets", "_set", "_multi_get", "_multi_set", "_delete", 21 | "_exists", "_increment", "_expire", "_clear", "_raw", "_close", 22 | "_redlock_release", "acquire_conn", "release_conn") 23 | with ExitStack() as stack: 24 | for f in methods: 25 | stack.enter_context(patch.object(cache, f, autospec=True)) 26 | stack.enter_context(patch.object(cache, "_serializer", autospec=True)) 27 | stack.enter_context(patch.object(cache, "build_key", cache._str_build_key)) 28 | yield cache 29 | 30 | 31 | @pytest.fixture 32 | def abstract_base_cache(): 33 | return AbstractBaseCache() 34 | 35 | 36 | @pytest.fixture 37 | def base_cache(): 38 | cache = ConcreteBaseCache() 39 | return cache 40 | 41 | 42 | @pytest.fixture 43 | async def redis_cache(redis_client): 44 | from aiocache.backends.redis import RedisCache 45 | 46 | async with RedisCache(client=redis_client) as cache: 47 | yield cache 48 | 49 | 50 | @pytest.fixture 51 | async def memcached_cache(): 52 | from aiocache.backends.memcached import 
MemcachedCache 53 | 54 | async with MemcachedCache() as cache: 55 | yield cache 56 | -------------------------------------------------------------------------------- /tests/ut/test_exceptions.py: -------------------------------------------------------------------------------- 1 | from aiocache.exceptions import InvalidCacheType 2 | 3 | 4 | def test_inherit_from_exception(): 5 | assert isinstance(InvalidCacheType(), Exception) 6 | -------------------------------------------------------------------------------- /tests/ut/test_lock.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from unittest.mock import Mock, patch 3 | 4 | import pytest 5 | 6 | from aiocache.lock import OptimisticLock, OptimisticLockError, RedLock 7 | from ..utils import KEY_LOCK, Keys 8 | 9 | 10 | class TestRedLock: 11 | @pytest.fixture 12 | def lock(self, mock_base_cache): 13 | RedLock._EVENTS = {} 14 | yield RedLock(mock_base_cache, Keys.KEY, 20) 15 | 16 | async def test_acquire(self, mock_base_cache, lock): 17 | await lock._acquire() 18 | mock_base_cache._add.assert_called_with(KEY_LOCK, lock._value, ttl=20) 19 | assert lock._EVENTS[KEY_LOCK].is_set() is False 20 | 21 | async def test_release(self, mock_base_cache, lock): 22 | mock_base_cache._redlock_release.return_value = True 23 | await lock._acquire() 24 | await lock._release() 25 | mock_base_cache._redlock_release.assert_called_with(KEY_LOCK, lock._value) 26 | assert KEY_LOCK not in lock._EVENTS 27 | 28 | async def test_release_no_acquire(self, mock_base_cache, lock): 29 | mock_base_cache._redlock_release.return_value = False 30 | assert KEY_LOCK not in lock._EVENTS 31 | await lock._release() 32 | assert KEY_LOCK not in lock._EVENTS 33 | 34 | async def test_context_manager(self, mock_base_cache, lock): 35 | async with lock: 36 | pass 37 | mock_base_cache._add.assert_called_with(KEY_LOCK, lock._value, ttl=20) 38 | mock_base_cache._redlock_release.assert_called_with(KEY_LOCK, lock._value) 39 | 40 | async def test_raises_exceptions(self, mock_base_cache, lock): 41 | mock_base_cache._redlock_release.return_value = True 42 | with pytest.raises(ValueError): 43 | async with lock: 44 | raise ValueError 45 | 46 | async def test_acquire_block_timeouts(self, mock_base_cache, lock): 47 | await lock._acquire() 48 | 49 | # Mock .wait() to avoid unawaited coroutine warning. 
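# What the block below exercises: the lock is already held, so the second
# _acquire() gets ValueError from _add, falls back to waiting on the shared
# event via asyncio.wait_for, and the forced TimeoutError makes it give up
# and return None.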
50 | with patch.object(RedLock._EVENTS[lock.key], "wait", Mock(spec_set=())): 51 | with patch("asyncio.wait_for", autospec=True, side_effect=asyncio.TimeoutError): 52 | mock_base_cache._add.side_effect = ValueError 53 | result = await lock._acquire() 54 | assert result is None 55 | 56 | async def test_wait_for_release_no_acquire(self, mock_base_cache, lock): 57 | mock_base_cache._add.side_effect = ValueError 58 | assert await lock._acquire() is None 59 | 60 | async def test_multiple_locks_lock(self, mock_base_cache, lock): 61 | lock_1 = RedLock(mock_base_cache, Keys.KEY, 20) 62 | lock_2 = RedLock(mock_base_cache, Keys.KEY, 20) 63 | mock_base_cache._add.side_effect = [True, ValueError(), ValueError()] 64 | await lock._acquire() 65 | event = lock._EVENTS[KEY_LOCK] 66 | 67 | assert KEY_LOCK in lock._EVENTS 68 | assert KEY_LOCK in lock_1._EVENTS 69 | assert KEY_LOCK in lock_2._EVENTS 70 | assert not event.is_set() 71 | 72 | await asyncio.gather(lock_1._acquire(), lock._release(), lock_2._acquire()) 73 | 74 | assert KEY_LOCK not in lock._EVENTS 75 | assert KEY_LOCK not in lock_1._EVENTS 76 | assert KEY_LOCK not in lock_2._EVENTS 77 | assert event.is_set() 78 | 79 | 80 | class TestOptimisticLock: 81 | @pytest.fixture 82 | def lock(self, mock_base_cache): 83 | yield OptimisticLock(mock_base_cache, Keys.KEY) 84 | 85 | def test_init(self, mock_base_cache, lock): 86 | assert lock.client == mock_base_cache 87 | assert lock._token is None 88 | assert lock.key == Keys.KEY 89 | assert lock.ns_key == mock_base_cache.build_key(Keys.KEY) 90 | 91 | async def test_aenter_returns_lock(self, lock): 92 | assert await lock.__aenter__() is lock 93 | 94 | async def test_aexit_not_crashing(self, lock): 95 | async with lock: 96 | pass 97 | 98 | async def test_acquire_calls_get(self, lock): 99 | await lock._acquire() 100 | lock.client._gets.assert_called_with(Keys.KEY) 101 | assert lock._token == lock.client._gets.return_value 102 | 103 | async def test_cas_calls_set_with_token(self, lock, mocker): 104 | m = mocker.spy(lock.client, "set") 105 | await lock._acquire() 106 | await lock.cas("value") 107 | m.assert_called_with(Keys.KEY, "value", _cas_token=lock._token) 108 | 109 | async def test_wrong_token_raises_error(self, mock_base_cache, lock): 110 | mock_base_cache._set.return_value = 0 111 | with pytest.raises(OptimisticLockError): 112 | await lock.cas("value") 113 | -------------------------------------------------------------------------------- /tests/ut/test_plugins.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import create_autospec 2 | 3 | import pytest 4 | 5 | from aiocache.base import API, BaseCache 6 | from aiocache.plugins import BasePlugin, HitMissRatioPlugin, TimingPlugin 7 | from ..utils import Keys 8 | 9 | 10 | class TestBasePlugin: 11 | async def test_interface_methods(self): 12 | for method in API.CMDS: 13 | pre = await getattr(BasePlugin, "pre_{}".format(method.__name__))(None) 14 | assert pre is None 15 | post = await getattr(BasePlugin, "post_{}".format(method.__name__))(None) 16 | assert post is None 17 | 18 | async def test_do_nothing(self): 19 | assert await BasePlugin().do_nothing() is None 20 | 21 | 22 | class TestTimingPlugin: 23 | async def test_save_time(self, mock_cache): 24 | do_save_time = TimingPlugin().save_time("get") 25 | await do_save_time("self", mock_cache, took=1) 26 | await do_save_time("self", mock_cache, took=2) 27 | 28 | assert mock_cache.profiling["get_total"] == 2 29 | assert mock_cache.profiling["get_max"] == 2 
30 | assert mock_cache.profiling["get_min"] == 1 31 | assert mock_cache.profiling["get_avg"] == 1.5 32 | 33 | async def test_save_time_post_set(self, mock_cache): 34 | await TimingPlugin().post_set(mock_cache, took=1) 35 | await TimingPlugin().post_set(mock_cache, took=2) 36 | 37 | assert mock_cache.profiling["set_total"] == 2 38 | assert mock_cache.profiling["set_max"] == 2 39 | assert mock_cache.profiling["set_min"] == 1 40 | assert mock_cache.profiling["set_avg"] == 1.5 41 | 42 | async def test_interface_methods(self): 43 | for method in API.CMDS: 44 | assert hasattr(TimingPlugin, "pre_{}".format(method.__name__)) 45 | assert hasattr(TimingPlugin, "post_{}".format(method.__name__)) 46 | 47 | 48 | class TestHitMissRatioPlugin: 49 | @pytest.fixture 50 | def plugin(self): 51 | return HitMissRatioPlugin() 52 | 53 | async def test_post_get(self, plugin): 54 | client = create_autospec(BaseCache, instance=True) 55 | await plugin.post_get(client, Keys.KEY) 56 | 57 | assert client.hit_miss_ratio["hits"] == 0 58 | assert client.hit_miss_ratio["total"] == 1 59 | assert client.hit_miss_ratio["hit_ratio"] == 0 60 | 61 | await plugin.post_get(client, Keys.KEY, ret="value") 62 | assert client.hit_miss_ratio["hits"] == 1 63 | assert client.hit_miss_ratio["total"] == 2 64 | assert client.hit_miss_ratio["hit_ratio"] == 0.5 65 | 66 | async def test_post_multi_get(self, plugin): 67 | client = create_autospec(BaseCache, instance=True) 68 | await plugin.post_multi_get(client, [Keys.KEY, Keys.KEY_1], ret=[None, None]) 69 | 70 | assert client.hit_miss_ratio["hits"] == 0 71 | assert client.hit_miss_ratio["total"] == 2 72 | assert client.hit_miss_ratio["hit_ratio"] == 0 73 | 74 | await plugin.post_multi_get(client, [Keys.KEY, Keys.KEY_1], ret=["value", "random"]) 75 | assert client.hit_miss_ratio["hits"] == 2 76 | assert client.hit_miss_ratio["total"] == 4 77 | assert client.hit_miss_ratio["hit_ratio"] == 0.5 78 | -------------------------------------------------------------------------------- /tests/ut/test_serializers.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from collections import namedtuple 3 | from unittest import mock 4 | 5 | import pytest 6 | 7 | from aiocache.serializers import ( 8 | BaseSerializer, 9 | JsonSerializer, 10 | MsgPackSerializer, 11 | NullSerializer, 12 | PickleSerializer, 13 | StringSerializer, 14 | ) 15 | 16 | 17 | Dummy = namedtuple("Dummy", "a, b") 18 | 19 | TYPES = [1, 2.0, "hi", True, ["1", 1], {"key": "value"}, Dummy(1, 2)] 20 | JSON_TYPES = [1, 2.0, "hi", True, ["1", 1], {"key": "value"}] 21 | 22 | 23 | class TestNullSerializer: 24 | def test_init(self): 25 | serializer = NullSerializer() 26 | assert isinstance(serializer, BaseSerializer) 27 | assert serializer.DEFAULT_ENCODING == "utf-8" 28 | assert serializer.encoding == "utf-8" 29 | 30 | def test_init_encoding(self): 31 | serializer = NullSerializer(encoding="whatever") 32 | assert serializer.DEFAULT_ENCODING == "utf-8" 33 | assert serializer.encoding == "whatever" 34 | 35 | @pytest.mark.parametrize("obj", TYPES) 36 | def test_set_types(self, obj): 37 | assert NullSerializer().dumps(obj) is obj 38 | 39 | def test_loads(self): 40 | assert NullSerializer().loads("hi") == "hi" 41 | 42 | 43 | class TestStringSerializer: 44 | def test_init(self): 45 | serializer = StringSerializer() 46 | assert isinstance(serializer, BaseSerializer) 47 | assert serializer.DEFAULT_ENCODING == "utf-8" 48 | assert serializer.encoding == "utf-8" 49 | 50 | @pytest.mark.parametrize("obj", 
TYPES) 51 | def test_set_types(self, obj): 52 | assert StringSerializer().dumps(obj) == str(obj) 53 | 54 | def test_loads(self): 55 | assert StringSerializer().loads("hi") == "hi" 56 | 57 | 58 | class TestPickleSerializer: 59 | @pytest.fixture 60 | def serializer(self): 61 | yield PickleSerializer(protocol=4) 62 | 63 | def test_init(self, serializer): 64 | assert isinstance(serializer, PickleSerializer) 65 | assert serializer.DEFAULT_ENCODING is None 66 | assert serializer.encoding is None 67 | assert serializer.protocol == 4 68 | 69 | def test_init_sets_default_protocol(self): 70 | serializer = PickleSerializer() 71 | assert serializer.protocol == pickle.DEFAULT_PROTOCOL 72 | 73 | @pytest.mark.parametrize("obj", TYPES) 74 | def test_set_types(self, obj, serializer): 75 | assert serializer.loads(serializer.dumps(obj)) == obj 76 | 77 | def test_dumps(self, serializer): 78 | expected = b"\x80\x04\x95\x06\x00\x00\x00\x00\x00\x00\x00\x8c\x02hi\x94." 79 | assert serializer.dumps("hi") == expected 80 | 81 | def test_dumps_with_none(self, serializer): 82 | assert isinstance(serializer.dumps(None), bytes) 83 | 84 | def test_loads(self, serializer): 85 | assert serializer.loads(b"\x80\x03X\x02\x00\x00\x00hiq\x00.") == "hi" 86 | 87 | def test_loads_with_none(self, serializer): 88 | assert serializer.loads(None) is None 89 | 90 | def test_dumps_and_loads(self, serializer): 91 | obj = Dummy(1, 2) 92 | assert serializer.loads(serializer.dumps(obj)) == obj 93 | 94 | 95 | class TestJsonSerializer: 96 | def test_init(self): 97 | serializer = JsonSerializer() 98 | assert isinstance(serializer, BaseSerializer) 99 | assert serializer.DEFAULT_ENCODING == "utf-8" 100 | assert serializer.encoding == "utf-8" 101 | 102 | @pytest.mark.parametrize("obj", JSON_TYPES) 103 | def test_set_types(self, obj): 104 | serializer = JsonSerializer() 105 | assert serializer.loads(serializer.dumps(obj)) == obj 106 | 107 | def test_dumps(self): 108 | assert ( 109 | JsonSerializer().dumps({"hi": 1}) == '{"hi": 1}' # json 110 | or JsonSerializer().dumps({"hi": 1}) == '{"hi":1}' # ujson 111 | ) 112 | 113 | def test_dumps_with_none(self): 114 | assert JsonSerializer().dumps(None) == "null" 115 | 116 | def test_loads_with_null(self): 117 | assert JsonSerializer().loads("null") is None 118 | 119 | def test_loads_with_none(self): 120 | assert JsonSerializer().loads(None) is None 121 | 122 | def test_dumps_and_loads(self): 123 | obj = {"hi": 1} 124 | serializer = JsonSerializer() 125 | assert serializer.loads(serializer.dumps(obj)) == obj 126 | 127 | 128 | class TestMsgPackSerializer: 129 | def test_init(self): 130 | serializer = MsgPackSerializer() 131 | assert isinstance(serializer, BaseSerializer) 132 | assert serializer.DEFAULT_ENCODING == "utf-8" 133 | assert serializer.encoding == "utf-8" 134 | 135 | def test_init_fails_if_msgpack_not_installed(self): 136 | with mock.patch("aiocache.serializers.serializers.msgpack", None): 137 | with pytest.raises(RuntimeError): 138 | MsgPackSerializer() 139 | assert JsonSerializer(), "Other serializers should still initialize" 140 | 141 | def test_init_use_list(self): 142 | serializer = MsgPackSerializer(use_list=True) 143 | assert serializer.use_list is True 144 | 145 | @pytest.mark.parametrize("obj", JSON_TYPES) 146 | def test_set_types(self, obj): 147 | serializer = MsgPackSerializer() 148 | assert serializer.loads(serializer.dumps(obj)) == obj 149 | 150 | def test_dumps(self): 151 | assert MsgPackSerializer().dumps("hi") == b"\xa2hi" 152 | 153 | def test_dumps_with_none(self): 154 | assert
isinstance(MsgPackSerializer().dumps(None), bytes) 155 | 156 | def test_loads(self): 157 | assert MsgPackSerializer().loads(b"\xa2hi") == "hi" 158 | 159 | def test_loads_no_encoding(self): 160 | assert MsgPackSerializer(encoding=None).loads(b"\xa2hi") == b"hi" 161 | 162 | def test_loads_with_none(self): 163 | assert MsgPackSerializer().loads(None) is None 164 | 165 | def test_dumps_and_loads_tuple(self): 166 | serializer = MsgPackSerializer() 167 | assert serializer.loads(serializer.dumps(Dummy(1, 2))) == [1, 2] 168 | 169 | def test_dumps_and_loads_dict(self): 170 | serializer = MsgPackSerializer() 171 | d = {"a": [1, 2, ("1", 2)], "b": {"b": 1, "c": [1, 2]}} 172 | assert serializer.loads(serializer.dumps(d)) == { 173 | "a": [1, 2, ["1", 2]], 174 | "b": {"b": 1, "c": [1, 2]}, 175 | } 176 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import Optional, Union 3 | 4 | from aiocache.base import BaseCache 5 | 6 | 7 | class Keys(str, Enum): 8 | KEY: str = "key" 9 | KEY_1: str = "random" 10 | 11 | 12 | KEY_LOCK = Keys.KEY + "-lock" 13 | 14 | 15 | def ensure_key(key: Union[str, Enum]) -> str: 16 | if isinstance(key, Enum): 17 | return key.value 18 | else: 19 | return key 20 | 21 | 22 | class AbstractBaseCache(BaseCache[str]): 23 | """BaseCache that can be mocked for NotImplementedError tests""" 24 | def __init__(self, *args, **kwargs): 25 | super().__init__(*args, **kwargs) 26 | 27 | def build_key(self, key: str, namespace: Optional[str] = None) -> str: 28 | return super().build_key(key, namespace) 29 | 30 | async def _add(self, key, value, ttl, _conn=None): 31 | return await super()._add(key, value, ttl, _conn) 32 | 33 | async def _get(self, key, encoding, _conn=None): 34 | return await super()._get(key, encoding, _conn) 35 | 36 | async def _gets(self, key, encoding="utf-8", _conn=None): 37 | return await super()._gets(key, encoding, _conn) 38 | 39 | async def _multi_get(self, keys, encoding, _conn=None): 40 | return await super()._multi_get(keys, encoding, _conn) 41 | 42 | async def _set(self, key, value, ttl, _cas_token=None, _conn=None): 43 | return await super()._set(key, value, ttl, _cas_token, _conn) 44 | 45 | async def _multi_set(self, pairs, ttl, _conn=None): 46 | return await super()._multi_set(pairs, ttl, _conn) 47 | 48 | async def _delete(self, key, _conn=None): 49 | return await super()._delete(key, _conn) 50 | 51 | async def _exists(self, key, _conn=None): 52 | return await super()._exists(key, _conn) 53 | 54 | async def _increment(self, key, delta, _conn=None): 55 | return await super()._increment(key, delta, _conn) 56 | 57 | async def _expire(self, key, ttl, _conn=None): 58 | return await super()._expire(key, ttl, _conn) 59 | 60 | async def _clear(self, namespace, _conn=None): 61 | return await super()._clear(namespace, _conn) 62 | 63 | async def _raw(self, command, *args, **kwargs): 64 | return await super()._raw(command, *args, **kwargs) 65 | 66 | async def _redlock_release(self, key, value): 67 | return await super()._redlock_release(key, value) 68 | 69 | 70 | class ConcreteBaseCache(AbstractBaseCache): 71 | """BaseCache that can be mocked for tests""" 72 | def __init__(self, *args, **kwargs): 73 | super().__init__(*args, **kwargs) 74 | 75 | def build_key(self, key: str, namespace: Optional[str] = None) -> str: 76 | return self._str_build_key(key, namespace) 77 | 
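# A quick illustration of how these helpers behave (values shown are
# hypothetical, not part of the suite):
#
#     ensure_key(Keys.KEY)   # -> "key", unwraps the Enum member
#     ensure_key("plain")    # -> "plain", a str passes through unchanged
#     KEY_LOCK               # -> "key-lock", the key the RedLock tests assert on
#     ConcreteBaseCache()    # concrete BaseCache whose build_key delegates to
#                            # _str_build_key, used by the ut fixtures above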
--------------------------------------------------------------------------------
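Appendix: the unit tests above drive each backend through its private underscore methods (_get, _set, _delete, ...). For orientation, below is a minimal sketch of the public flow those methods back, assuming a local Redis on 127.0.0.1:6379; this is hypothetical usage, not part of the repository:

    import asyncio

    import redis.asyncio as redis

    from aiocache.backends.redis import RedisCache


    async def main() -> None:
        # decode_responses must be False; RedisBackend rejects clients built
        # otherwise (see test_redis_backend_requires_client_decode_responses).
        client = redis.Redis(host="127.0.0.1", port=6379, decode_responses=False)
        async with RedisCache(client=client, namespace="demo") as cache:
            await cache.set("key", "value", ttl=10)   # backed by _set / setex
            assert await cache.get("key") == "value"  # backed by _get
            await cache.delete("key")                 # backed by _delete


    asyncio.run(main())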