├── .coveragerc ├── .flake8 ├── .github ├── dependabot.yml └── workflows │ ├── auto-merge.yml │ ├── ci.yaml │ └── codeql.yml ├── .gitignore ├── .mypy.ini ├── CHANGES.rst ├── CONTRIBUTORS.txt ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── aiomcache ├── __init__.py ├── client.py ├── constants.py ├── exceptions.py ├── pool.py └── py.typed ├── examples ├── simple.py └── simple_with_flag_handler.py ├── pytest.ini ├── requirements-dev.txt ├── requirements.txt ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── commands_test.py ├── conftest.py ├── conn_args_test.py ├── flag_helper.py └── pool_test.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = aiomcache, tests 4 | omit = site-packages 5 | 6 | [html] 7 | directory = htmlcov 8 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | enable-extensions = G 3 | exclude = build/ venv/ 4 | max-doc-length = 90 5 | max-line-length = 90 6 | select = A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,B901,B902,B903,B950 7 | # E226: Missing whitespace around arithmetic operators can help group things together. 8 | # E501: Superseeded by B950 (from Bugbear) 9 | # E722: Superseeded by B001 (from Bugbear) 10 | # W503: Mutually exclusive with W504. 11 | ignore = E226,E501,E722,W503 12 | per-file-ignores = 13 | # S101: Pytest uses assert 14 | tests/*:S101 15 | 16 | # flake8-import-order 17 | application-import-names = aiomcache 18 | import-order-style = pycharm 19 | 20 | # flake8-quotes 21 | inline-quotes = " 22 | 23 | # flake8-requirements 24 | known-modules = docker-py:[docker],python-memcached:[memcache] 25 | requirements-file = requirements.txt 26 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: "monthly" 12 | -------------------------------------------------------------------------------- /.github/workflows/auto-merge.yml: -------------------------------------------------------------------------------- 1 | name: Dependabot auto-merge 2 | on: pull_request_target 3 | 4 | permissions: 5 | pull-requests: write 6 | contents: write 7 | 8 | jobs: 9 | dependabot: 10 | runs-on: ubuntu-latest 11 | if: ${{ github.actor == 'dependabot[bot]' }} 12 | steps: 13 | - name: Dependabot metadata 14 | id: metadata 15 | uses: dependabot/fetch-metadata@v2.4.0 16 | with: 17 | github-token: "${{ secrets.GITHUB_TOKEN }}" 18 | - name: Enable auto-merge for Dependabot PRs 19 | run: gh pr merge --auto --squash "$PR_URL" 20 | env: 21 | PR_URL: ${{github.event.pull_request.html_url}} 22 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 23 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - '[0-9].[0-9]+' # matches to backport branches, e.g. 
3.6 8 | tags: [ 'v*' ] 9 | pull_request: 10 | branches: 11 | - master 12 | - '[0-9].[0-9]+' 13 | 14 | jobs: 15 | lint: 16 | name: Linter 17 | runs-on: ubuntu-latest 18 | timeout-minutes: 5 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | - name: Setup Python 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: 3.9 26 | cache: 'pip' 27 | cache-dependency-path: '**/requirements*.txt' 28 | - name: Install system dependencies 29 | run: sudo apt-get install -y libmemcached-dev 30 | - name: Install python dependencies 31 | uses: py-actions/py-dependency-install@v4 32 | with: 33 | path: requirements-dev.txt 34 | - name: Mypy 35 | run: | 36 | mypy 37 | - name: Flake8 38 | run: | 39 | flake8 40 | - name: Prepare twine checker 41 | run: | 42 | pip install -U twine wheel 43 | python setup.py sdist bdist_wheel 44 | - name: Run twine checker 45 | run: | 46 | twine check dist/* 47 | 48 | test: 49 | name: Tests 50 | runs-on: ubuntu-latest 51 | strategy: 52 | matrix: 53 | python-version: ['3.9', '3.10', '3.11', '3.12', '3.13'] 54 | 55 | steps: 56 | - name: Checkout 57 | uses: actions/checkout@v4 58 | - name: Set up Python ${{ matrix.python-version }} 59 | uses: actions/setup-python@v5 60 | with: 61 | allow-prereleases: true 62 | python-version: ${{ matrix.python-version }} 63 | - name: Install system dependencies 64 | run: sudo apt-get install -y libmemcached-dev 65 | - name: Install python dependencies 66 | run: | 67 | pip install --upgrade pip 68 | pip install -U setuptools 69 | pip install -r requirements.txt 70 | pip install codecov 71 | - name: Run memcached service 72 | uses: jkeys089/actions-memcached@master 73 | - name: Run tests 74 | run: pytest 75 | - run: python -m coverage xml 76 | - name: Upload coverage 77 | uses: codecov/codecov-action@v5 78 | with: 79 | fail_ci_if_error: true 80 | token: ${{ secrets.CODECOV_TOKEN }} 81 | 82 | check: # This job does nothing and is only used for the branch protection 83 | if: always() 84 | needs: [lint, test] 85 | runs-on: ubuntu-latest 86 | steps: 87 | - name: Decide whether the needed jobs succeeded or failed 88 | uses: re-actors/alls-green@release/v1 89 | with: 90 | jobs: ${{ toJSON(needs) }} 91 | 92 | deploy: 93 | name: Deploy 94 | environment: release 95 | if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') 96 | needs: [check] 97 | runs-on: ubuntu-latest 98 | steps: 99 | - name: Checkout 100 | uses: actions/checkout@v4 101 | - name: Update pip, wheel, setuptools, build, twine 102 | run: | 103 | python -m pip install -U pip wheel setuptools build twine 104 | - name: Build dists 105 | run: | 106 | python -m build 107 | - name: Make Release 108 | uses: aio-libs/create-release@v1.6.6 109 | with: 110 | changes_file: CHANGES.rst 111 | name: aiomcache 112 | version_file: aiomcache/__init__.py 113 | github_token: ${{ secrets.GITHUB_TOKEN }} 114 | pypi_token: ${{ secrets.PYPI_API_TOKEN }} 115 | dist_dir: dist 116 | fix_issue_regex: "`#(\\d+) `" 117 | fix_issue_repl: "(#\\1)" 118 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ 'master' ] 6 | pull_request: 7 | # The branches below must be a subset of the branches above 8 | branches: [ 'master' ] 9 | schedule: 10 | - cron: '11 11 * * 5' 11 | 12 | jobs: 13 | analyze: 14 | name: Analyze 15 | runs-on: ubuntu-latest 16 | permissions: 17 | actions: read 18 | contents: 
read 19 | security-events: write 20 | 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | language: [ 'python' ] 25 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 26 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 27 | 28 | steps: 29 | - name: Checkout repository 30 | uses: actions/checkout@v4 31 | 32 | # Initializes the CodeQL tools for scanning. 33 | - name: Initialize CodeQL 34 | uses: github/codeql-action/init@v3 35 | with: 36 | languages: ${{ matrix.language }} 37 | # If you wish to specify custom queries, you can do so here or in a config file. 38 | # By default, queries listed here will override any specified in a config file. 39 | # Prefix the list here with "+" to use these queries and those in the config file. 40 | 41 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 42 | # queries: security-extended,security-and-quality 43 | 44 | 45 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). 46 | # If this step fails, then you should remove it and run the build manually (see below) 47 | - name: Autobuild 48 | uses: github/codeql-action/autobuild@v3 49 | 50 | # ℹ️ Command-line programs to run using the OS shell. 51 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 52 | 53 | # If the Autobuild fails above, remove it and uncomment the following three lines. 54 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 55 | 56 | # - run: | 57 | # echo "Run, Build Application using script" 58 | # ./location_of_script_within_repo/buildscript.sh 59 | 60 | - name: Perform CodeQL Analysis 61 | uses: github/codeql-action/analyze@v3 62 | with: 63 | category: "/language:${{matrix.language}}" 64 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | bin 3 | parts 4 | coverage 5 | eggs 6 | sources 7 | dist 8 | venv 9 | htmlcov 10 | .installed.cfg 11 | develop-eggs 12 | var/* 13 | *.egg-info 14 | *.pyc 15 | *.pyo 16 | *.bak 17 | *.egg 18 | *.tar.gz 19 | *.so 20 | .tox 21 | .DS_Store 22 | .coverage 23 | .idea 24 | docs/_build/ 25 | build 26 | .cache 27 | .python-version 28 | .pytest_cache/ 29 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | files = aiomcache, examples, tests 3 | check_untyped_defs = True 4 | follow_imports_for_stubs = True 5 | disallow_any_decorated = True 6 | disallow_any_generics = True 7 | disallow_any_unimported = True 8 | disallow_incomplete_defs = True 9 | disallow_subclassing_any = True 10 | disallow_untyped_calls = True 11 | disallow_untyped_decorators = True 12 | disallow_untyped_defs = True 13 | enable_error_code = redundant-expr, truthy-bool, ignore-without-code, unused-awaitable 14 | implicit_reexport = False 15 | no_implicit_optional = True 16 | pretty = True 17 | show_column_numbers = True 18 | show_error_codes = True 19 | strict_equality = True 20 | warn_incomplete_stub = True 21 | warn_redundant_casts = True 22 | warn_return_any = True 23 | warn_unreachable = True 24 | warn_unused_ignores = True 25 | 26 | 
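# Per-module overrides: relax decorator checks for the test suite and ignore missing type stubs for the docker and memcache packages.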
[mypy-tests.*] 27 | disallow_any_decorated = False 28 | 29 | 30 | [mypy-docker.*] 31 | ignore_missing_imports = True 32 | 33 | [mypy-memcache.*] 34 | ignore_missing_imports = True 35 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | CHANGES 3 | ======= 4 | 5 | .. towncrier release notes start 6 | 7 | 0.8.2 (2024-05-07) 8 | ================== 9 | - Fix a static typing error with ``Client.get()``. 10 | 11 | 0.8.1 (2023-02-10) 12 | ================== 13 | - Add ``conn_args`` to ``Client`` to allow TLS and other options when connecting to memcache. 14 | 15 | 0.8.0 (2022-12-11) 16 | ================== 17 | - Add ``FlagClient`` to support memcached flags. 18 | - Fix type annotations for ``@acquire``. 19 | - Fix rare exception caused by memcached server dying in middle of operation. 20 | - Fix get method to not use CAS. 21 | 22 | 0.7.0 (2022-01-20) 23 | ===================== 24 | 25 | - Added support for Python 3.10 26 | - Added support for non-ascii keys 27 | - Added type annotations 28 | 29 | 0.6.0 (2017-12-03) 30 | ================== 31 | 32 | - Drop python 3.3 support 33 | 34 | 0.5.2 (2017-05-27) 35 | ================== 36 | 37 | - Fix issue with pool concurrency and task cancellation 38 | 39 | 0.5.1 (2017-03-08) 40 | ================== 41 | 42 | - Added MANIFEST.in 43 | 44 | 0.5.0 (2017-02-08) 45 | ================== 46 | 47 | - Added gets and cas commands 48 | 49 | 0.4.0 (2016-09-26) 50 | ================== 51 | 52 | - Make max_size strict #14 53 | 54 | 0.3.0 (2016-03-11) 55 | ================== 56 | 57 | - Dockerize tests 58 | 59 | - Reuse memcached connections in Client Pool #4 60 | 61 | - Fix stats parse to compatible more mc class software #5 62 | 63 | 0.2 (2015-12-15) 64 | ================ 65 | 66 | - Make the library Python 3.5 compatible 67 | 68 | 0.1 (2014-06-18) 69 | ================ 70 | 71 | - Initial release 72 | -------------------------------------------------------------------------------- /CONTRIBUTORS.txt: -------------------------------------------------------------------------------- 1 | Contributors 2 | ------------ 3 | 4 | Maarten Draijer 5 | Manuel Miranda 6 | Nikolay Novik 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013-2016, Nikolay Kim, KeepSafe 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | 1. Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 | A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 19 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include CHANGES.rst 3 | include README.rst 4 | graft aiomcache 5 | global-exclude *.pyc *.swp 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Some simple testing tasks (sorry, UNIX only). 2 | 3 | doc: 4 | cd docs && make html 5 | echo "open file://`pwd`/docs/_build/html/index.html" 6 | 7 | 8 | cov cover coverage: 9 | py.test --cov=aiomcache --cov-report=html --cov-report=xml --cov-report=term-missing tests 10 | @echo "open file://`pwd`/htmlcov/index.html" 11 | 12 | 13 | clean: 14 | find . -name __pycache__ |xargs rm -rf 15 | find . -type f -name '*.py[co]' -delete 16 | find . -type f -name '*~' -delete 17 | find . -type f -name '.*~' -delete 18 | find . -type f -name '@*' -delete 19 | find . -type f -name '#*#' -delete 20 | find . -type f -name '*.orig' -delete 21 | find . -type f -name '*.rej' -delete 22 | rm -f .coverage 23 | rm -rf coverage 24 | rm -rf docs/_build 25 | 26 | .PHONY: all cov clean 27 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | memcached client for asyncio 2 | ============================ 3 | 4 | asyncio (PEP 3156) library to work with memcached. 5 | 6 | 7 | Getting started 8 | --------------- 9 | 10 | The API looks very similar to the other memcache clients: 11 | 12 | .. code:: python 13 | 14 | import asyncio 15 | import aiomcache 16 | 17 | async def hello_aiomcache(): 18 | mc = aiomcache.Client("127.0.0.1", 11211) 19 | await mc.set(b"some_key", b"Some value") 20 | value = await mc.get(b"some_key") 21 | print(value) 22 | values = await mc.multi_get(b"some_key", b"other_key") 23 | print(values) 24 | await mc.delete(b"another_key") 25 | 26 | asyncio.run(hello_aiomcache()) 27 | 28 | 29 | Version 0.8 introduces `FlagClient` which allows registering callbacks to 30 | set or process flags. 
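A minimal sketch of what such handlers can look like (the handler names, the pickle-based encoding, and the flag value below are illustrative, mirroring the bundled example rather than anything built into the library):

.. code:: python

    import pickle
    from typing import Any, Tuple

    import aiomcache

    PICKLE_FLAG = 1  # application-chosen flag value

    async def load_value(value: bytes, flags: int) -> Any:
        # invoked on get() for any value stored with a non-zero flag
        if flags == PICKLE_FLAG:
            return pickle.loads(value)
        raise ValueError(f"unrecognized flag: {flags}")

    async def dump_value(value: Any) -> Tuple[bytes, int]:
        # invoked on set() for any non-bytes value
        return pickle.dumps(value), PICKLE_FLAG

    mc = aiomcache.FlagClient("127.0.0.1", 11211,
                              get_flag_handler=load_value,
                              set_flag_handler=dump_value)
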
See `examples/simple_with_flag_handler.py` 31 | -------------------------------------------------------------------------------- /aiomcache/__init__.py: -------------------------------------------------------------------------------- 1 | """memcached client, based on mixpanel's memcache_client library 2 | 3 | Usage example:: 4 | 5 | import aiomcache 6 | mc = aiomcache.Client("127.0.0.1", 11211) 7 | await mc.set("some_key", "Some value") 8 | value = await mc.get("some_key") 9 | await mc.delete("another_key") 10 | """ 11 | 12 | from .client import Client, FlagClient 13 | from .exceptions import ClientException, ValidationException 14 | 15 | __all__ = ("Client", "ClientException", "FlagClient", "ValidationException") 16 | 17 | __version__ = "0.8.2" 18 | -------------------------------------------------------------------------------- /aiomcache/client.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import re 3 | import sys 4 | from typing import (Any, Awaitable, Callable, Dict, Generic, Literal, Mapping, Optional, 5 | Tuple, TypeVar, Union, overload) 6 | 7 | from . import constants as const 8 | from .exceptions import ClientException, ValidationException 9 | from .pool import Connection, MemcachePool 10 | 11 | if sys.version_info >= (3, 10): 12 | from typing import Concatenate, ParamSpec 13 | else: 14 | from typing_extensions import Concatenate, ParamSpec 15 | 16 | __all__ = ['Client'] 17 | 18 | _P = ParamSpec("_P") 19 | _T = TypeVar("_T") 20 | _U = TypeVar("_U") 21 | _Client = TypeVar("_Client", bound="FlagClient[Any]") 22 | _Result = Tuple[Dict[bytes, Union[bytes, _T]], Dict[bytes, _U]] 23 | 24 | _GetFlagHandler = Callable[[bytes, int], Awaitable[_T]] 25 | _SetFlagHandler = Callable[[_T], Awaitable[Tuple[bytes, int]]] 26 | 27 | 28 | def acquire( 29 | func: Callable[Concatenate[_Client, Connection, _P], Awaitable[_T]] 30 | ) -> Callable[Concatenate[_Client, _P], Awaitable[_T]]: 31 | 32 | @functools.wraps(func) 33 | async def wrapper(self: _Client, *args: _P.args, # type: ignore[misc] 34 | **kwargs: _P.kwargs) -> _T: 35 | conn = await self._pool.acquire() 36 | try: 37 | return await func(self, conn, *args, **kwargs) 38 | except Exception as exc: 39 | conn[0].set_exception(exc) 40 | raise 41 | finally: 42 | self._pool.release(conn) 43 | 44 | return wrapper 45 | 46 | 47 | class FlagClient(Generic[_T]): 48 | def __init__(self, host: str, port: int = 11211, *, 49 | pool_size: int = 2, pool_minsize: Optional[int] = None, 50 | conn_args: Optional[Mapping[str, Any]] = None, 51 | get_flag_handler: Optional[_GetFlagHandler[_T]] = None, 52 | set_flag_handler: Optional[_SetFlagHandler[_T]] = None): 53 | """ 54 | Creates new Client instance. 55 | 56 | :param host: memcached host 57 | :param port: memcached port 58 | :param pool_size: max connection pool size 59 | :param pool_minsize: min connection pool size 60 | :param conn_args: extra arguments passed to 61 | asyncio.open_connection(). For details, see: 62 | https://docs.python.org/3/library/asyncio-stream.html#asyncio.open_connection. 63 | :param get_flag_handler: async method to call to convert flagged 64 | values. Method takes tuple: (value, flags) and should return 65 | processed value or raise ClientException if not supported. 66 | :param set_flag_handler: async method to call to convert non bytes 67 | value to flagged value. Method takes value and must return tuple: 68 | (value, flags). 
69 | """ 70 | if not pool_minsize: 71 | pool_minsize = pool_size 72 | 73 | self._pool = MemcachePool( 74 | host, port, minsize=pool_minsize, maxsize=pool_size, 75 | conn_args=conn_args) 76 | 77 | self._get_flag_handler = get_flag_handler 78 | self._set_flag_handler = set_flag_handler 79 | 80 | # key may be anything except whitespace and control chars, upto 250 characters. 81 | # Must be str for unicode-aware regex. 82 | _valid_key_re = re.compile("^[^\\s\x00-\x1F\x7F-\x9F]{1,250}$") 83 | 84 | def _validate_key(self, key: bytes) -> bytes: 85 | if not isinstance(key, bytes): # avoid bugs subtle and otherwise 86 | raise ValidationException('key must be bytes', key) 87 | 88 | # Must decode to str for unicode-aware comparison. 89 | key_str = key.decode() 90 | m = self._valid_key_re.match(key_str) 91 | if m: 92 | # in python re, $ matches either end of line or right before 93 | # \n at end of line. We can't allow latter case, so 94 | # making sure length matches is simplest way to detect 95 | if len(m.group(0)) != len(key_str): 96 | raise ValidationException('trailing newline', key) 97 | else: 98 | raise ValidationException('invalid key', key) 99 | 100 | return key 101 | 102 | async def _execute_simple_command(self, conn: Connection, raw_command: bytes) -> bytes: 103 | response, line = bytearray(), b'' 104 | 105 | conn.writer.write(raw_command) 106 | await conn.writer.drain() 107 | 108 | while not line.endswith(b'\r\n'): 109 | line = await conn.reader.readline() 110 | response.extend(line) 111 | return response[:-2] 112 | 113 | async def close(self) -> None: 114 | """Closes the sockets if its open.""" 115 | await self._pool.clear() 116 | 117 | @overload 118 | async def _multi_get(self, conn: Connection, *keys: bytes, 119 | with_cas: Literal[True] = ...) -> _Result[_T, int]: 120 | ... 121 | 122 | @overload 123 | async def _multi_get(self, conn: Connection, *keys: bytes, 124 | with_cas: Literal[False]) -> _Result[_T, None]: 125 | ... 126 | 127 | async def _multi_get( # type: ignore[misc] 128 | self, conn: Connection, *keys: bytes, 129 | with_cas: bool = True) -> _Result[_T, Optional[int]]: 130 | # req - get [ ...]\r\n 131 | # resp - VALUE []\r\n 132 | # \r\n (if exists) 133 | # [...] 
134 | # END\r\n 135 | if not keys: 136 | return {}, {} 137 | 138 | for key in keys: 139 | self._validate_key(key) 140 | 141 | if len(set(keys)) != len(keys): 142 | raise ClientException('duplicate keys passed to multi_get') 143 | 144 | cmd = b'gets ' if with_cas else b'get ' 145 | conn.writer.write(cmd + b' '.join(keys) + b'\r\n') 146 | 147 | received = {} 148 | cas_tokens = {} 149 | line = await conn.reader.readline() 150 | 151 | while line != b'END\r\n': 152 | terms = line.split() 153 | 154 | if terms and terms[0] == b"VALUE": # exists 155 | key = terms[1] 156 | flags = int(terms[2]) 157 | length = int(terms[3]) 158 | 159 | val_bytes = (await conn.reader.readexactly(length+2))[:-2] 160 | if key in received: 161 | raise ClientException('duplicate results from server') 162 | 163 | if flags: 164 | if not self._get_flag_handler: 165 | raise ClientException("received flags without handler") 166 | 167 | val: Union[bytes, _T] = await self._get_flag_handler(val_bytes, flags) 168 | else: 169 | val = val_bytes 170 | 171 | received[key] = val 172 | cas_tokens[key] = int(terms[4]) if with_cas else None 173 | else: 174 | raise ClientException('get failed', line) 175 | 176 | line = await conn.reader.readline() 177 | 178 | if len(received) > len(keys): 179 | raise ClientException('received too many responses') 180 | 181 | return received, cas_tokens 182 | 183 | @acquire 184 | async def delete(self, conn: Connection, key: bytes) -> bool: 185 | """Deletes a key/value pair from the server. 186 | 187 | :param key: is the key to delete. 188 | :return: True if case values was deleted or False to indicate 189 | that the item with this key was not found. 190 | """ 191 | self._validate_key(key) 192 | 193 | command = b'delete ' + key + b'\r\n' 194 | response = await self._execute_simple_command(conn, command) 195 | 196 | if response not in (const.DELETED, const.NOT_FOUND): 197 | raise ClientException('Memcached delete failed', response) 198 | 199 | return response == const.DELETED 200 | 201 | @acquire 202 | @overload 203 | async def get(self, conn: Connection, /, key: bytes, 204 | default: None = ...) -> Union[bytes, _T, None]: 205 | ... 206 | 207 | @acquire 208 | @overload 209 | async def get(self, conn: Connection, /, key: bytes, default: _U) -> Union[bytes, _T, _U]: 210 | ... 211 | 212 | @acquire 213 | async def get( 214 | self, conn: Connection, /, key: bytes, default: Optional[_U] = None 215 | ) -> Union[bytes, _T, _U, None]: 216 | """Gets a single value from the server. 217 | 218 | :param key: ``bytes``, is the key for the item being fetched 219 | :param default: default value if there is no value. 220 | :return: ``bytes``, is the data for this specified key. 221 | """ 222 | values, _ = await self._multi_get(conn, key, with_cas=False) 223 | return values.get(key, default) 224 | 225 | @acquire 226 | async def gets( 227 | self, conn: Connection, key: bytes, default: Optional[bytes] = None 228 | ) -> Tuple[Union[bytes, _T, None], Optional[int]]: 229 | """Gets a single value from the server together with the cas token. 230 | 231 | :param key: ``bytes``, is the key for the item being fetched 232 | :param default: default value if there is no value. 
233 | :return: tuple of the value (``bytes``) and the cas token (``int``). 234 | """ 235 | values, cas_tokens = await self._multi_get(conn, key, with_cas=True) 236 | return values.get(key, default), cas_tokens.get(key) 237 | 238 | @acquire 239 | async def multi_get( 240 | self, conn: Connection, *keys: bytes 241 | ) -> Tuple[Union[bytes, _T, None], ...]: 242 | """Takes a list of keys and returns a list of values. 243 | 244 | :param keys: ``list`` keys for the item being fetched. 245 | :return: ``list`` of values for the specified keys. 246 | :raises: ``ValidationException``, ``ClientException``, 247 | and socket errors 248 | """ 249 | values, _ = await self._multi_get(conn, *keys) 250 | return tuple(values.get(key) for key in keys) 251 | 252 | @acquire 253 | async def stats( 254 | self, conn: Connection, args: Optional[bytes] = None 255 | ) -> Dict[bytes, Optional[bytes]]: 256 | """Runs a stats command on the server.""" 257 | # req - stats [additional args]\r\n 258 | # resp - STAT <name> <value>\r\n (one per result) 259 | # END\r\n 260 | if args is None: 261 | args = b'' 262 | 263 | conn.writer.write(b''.join((b'stats ', args, b'\r\n'))) 264 | 265 | result: Dict[bytes, Optional[bytes]] = {} 266 | 267 | resp = await conn.reader.readline() 268 | while resp != b'END\r\n': 269 | terms = resp.split() 270 | 271 | if len(terms) == 2 and terms[0] == b'STAT': 272 | result[terms[1]] = None 273 | elif len(terms) == 3 and terms[0] == b'STAT': 274 | result[terms[1]] = terms[2] 275 | elif len(terms) >= 3 and terms[0] == b'STAT': 276 | result[terms[1]] = b' '.join(terms[2:]) 277 | else: 278 | raise ClientException('stats failed', resp) 279 | 280 | resp = await conn.reader.readline() 281 | 282 | return result 283 | 284 | async def _storage_command(self, conn: Connection, command: bytes, key: bytes, 285 | value: Union[bytes, _T], exptime: int = 0, 286 | cas: Optional[int] = None) -> bool: 287 | # req - set <key> <flags> <exptime> <bytes> [noreply]\r\n 288 | # <data block>\r\n 289 | # resp - STORED\r\n (or others) 290 | # req - set <key> <flags> <exptime> <bytes> [noreply]\r\n 291 | # <data block>\r\n 292 | # resp - STORED\r\n (or others) 293 | 294 | # typically, if val is > 1024**2 bytes server returns: 295 | # SERVER_ERROR object too large for cache\r\n 296 | # however custom-compiled memcached can have different limit 297 | # so, we'll let the server decide what's too much 298 | self._validate_key(key) 299 | 300 | if not isinstance(exptime, int): 301 | raise ValidationException('exptime not int', exptime) 302 | elif exptime < 0: 303 | raise ValidationException('exptime negative', exptime) 304 | 305 | flags = 0 306 | if not isinstance(value, bytes): 307 | # flag handler only invoked on non-byte values, 308 | # consistent with only being invoked on non-zero flags on retrieval 309 | if self._set_flag_handler is None: 310 | raise ValidationException("flag handler must be set for non-byte values") 311 | value, flags = await self._set_flag_handler(value) 312 | cas_value = b" %a" % cas if cas else b"" 313 | cmd = b"%b %b %a %a %a%b\r\n%b\r\n" % ( 314 | command, key, flags, exptime, len(value), cas_value, value 315 | ) 316 | resp = await self._execute_simple_command(conn, cmd) 317 | 318 | if resp not in ( 319 | const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND): 320 | raise ClientException('stats {} failed'.format(command.decode()), resp) 321 | return resp == const.STORED 322 | 323 | @acquire 324 | async def set(self, conn: Connection, key: bytes, value: Union[bytes, _T], 325 | exptime: int = 0) -> bool: 326 | """Sets a key to a value on the server 327 | with an optional exptime (0 means don't auto-expire) 
328 | 329 | :param key: ``bytes``, is the key of the item. 330 | :param value: ``bytes``, data to store. 331 | :param exptime: ``int``, is expiration time. If it's 0, the 332 | item never expires. 333 | :return: ``bool``, True in case of success. 334 | """ 335 | return await self._storage_command(conn, b"set", key, value, exptime) 336 | 337 | @acquire 338 | async def cas(self, conn: Connection, key: bytes, value: Union[bytes, _T], cas_token: int, 339 | exptime: int = 0) -> bool: 340 | """Sets a key to a value on the server 341 | with an optional exptime (0 means don't auto-expire) 342 | only if value hasn't changed from first retrieval 343 | 344 | :param key: ``bytes``, is the key of the item. 345 | :param value: ``bytes``, data to store. 346 | :param exptime: ``int``, is expiration time. If it's 0, the 347 | item never expires. 348 | :param cas_token: ``int``, unique cas token retrieve from previous 349 | ``gets`` 350 | :return: ``bool``, True in case of success. 351 | """ 352 | return await self._storage_command(conn, b"cas", key, value, exptime, 353 | cas=cas_token) 354 | 355 | @acquire 356 | async def add(self, conn: Connection, key: bytes, value: Union[bytes, _T], 357 | exptime: int = 0) -> bool: 358 | """Store this data, but only if the server *doesn't* already 359 | hold data for this key. 360 | 361 | :param key: ``bytes``, is the key of the item. 362 | :param value: ``bytes``, data to store. 363 | :param exptime: ``int`` is expiration time. If it's 0, the 364 | item never expires. 365 | :return: ``bool``, True in case of success. 366 | """ 367 | return await self._storage_command(conn, b"add", key, value, exptime) 368 | 369 | @acquire 370 | async def replace(self, conn: Connection, key: bytes, value: Union[bytes, _T], 371 | exptime: int = 0) -> bool: 372 | """Store this data, but only if the server *does* 373 | already hold data for this key. 374 | 375 | :param key: ``bytes``, is the key of the item. 376 | :param value: ``bytes``, data to store. 377 | :param exptime: ``int`` is expiration time. If it's 0, the 378 | item never expires. 379 | :return: ``bool``, True in case of success. 380 | """ 381 | return await self._storage_command(conn, b"replace", key, value, exptime) 382 | 383 | @acquire 384 | async def append(self, conn: Connection, key: bytes, value: Union[bytes, _T], 385 | exptime: int = 0) -> bool: 386 | """Add data to an existing key after existing data 387 | 388 | :param key: ``bytes``, is the key of the item. 389 | :param value: ``bytes``, data to store. 390 | :param exptime: ``int`` is expiration time. If it's 0, the 391 | item never expires. 392 | :return: ``bool``, True in case of success. 393 | """ 394 | return await self._storage_command(conn, b"append", key, value, exptime) 395 | 396 | @acquire 397 | async def prepend(self, conn: Connection, key: bytes, value: bytes, exptime: int = 0) -> bool: 398 | """Add data to an existing key before existing data 399 | 400 | :param key: ``bytes``, is the key of the item. 401 | :param value: ``bytes``, data to store. 402 | :param exptime: ``int`` is expiration time. If it's 0, the 403 | item never expires. 404 | :return: ``bool``, True in case of success. 
405 | """ 406 | return await self._storage_command(conn, b"prepend", key, value, exptime) 407 | 408 | async def _incr_decr( 409 | self, conn: Connection, command: bytes, key: bytes, delta: int 410 | ) -> Optional[int]: 411 | cmd = b"%b %b %a\r\n" % (command, key, delta) 412 | resp = await self._execute_simple_command(conn, cmd) 413 | if not resp.isdigit() or resp == const.NOT_FOUND: 414 | raise ClientException( 415 | 'Memcached {} command failed'.format(str(command)), resp) 416 | return int(resp) if resp.isdigit() else None 417 | 418 | @acquire 419 | async def incr(self, conn: Connection, key: bytes, increment: int = 1) -> Optional[int]: 420 | """Command is used to change data for some item in-place, 421 | incrementing it. The data for the item is treated as decimal 422 | representation of a 64-bit unsigned integer. 423 | 424 | :param key: ``bytes``, is the key of the item the client wishes 425 | to change 426 | :param increment: ``int``, is the amount by which the client 427 | wants to increase the item. 428 | :return: ``int``, new value of the item's data, 429 | after the increment or ``None`` to indicate the item with 430 | this value was not found 431 | """ 432 | self._validate_key(key) 433 | return await self._incr_decr(conn, b"incr", key, increment) 434 | 435 | @acquire 436 | async def decr(self, conn: Connection, key: bytes, decrement: int = 1) -> Optional[int]: 437 | """Command is used to change data for some item in-place, 438 | decrementing it. The data for the item is treated as decimal 439 | representation of a 64-bit unsigned integer. 440 | 441 | :param key: ``bytes``, is the key of the item the client wishes 442 | to change 443 | :param decrement: ``int``, is the amount by which the client 444 | wants to decrease the item. 445 | :return: ``int`` new value of the item's data, 446 | after the increment or ``None`` to indicate the item with 447 | this value was not found 448 | """ 449 | self._validate_key(key) 450 | return await self._incr_decr(conn, b"decr", key, decrement) 451 | 452 | @acquire 453 | async def touch(self, conn: Connection, key: bytes, exptime: int) -> bool: 454 | """The command is used to update the expiration time of 455 | an existing item without fetching it. 456 | 457 | :param key: ``bytes``, is the key to update expiration time 458 | :param exptime: ``int``, is expiration time. This replaces the existing 459 | expiration time. 460 | :return: ``bool``, True in case of success. 461 | """ 462 | self._validate_key(key) 463 | 464 | cmd = b"touch %b %a\r\n" % (key, exptime) 465 | resp = await self._execute_simple_command(conn, cmd) 466 | if resp not in (const.TOUCHED, const.NOT_FOUND): 467 | raise ClientException('Memcached touch failed', resp) 468 | return resp == const.TOUCHED 469 | 470 | @acquire 471 | async def version(self, conn: Connection) -> bytes: 472 | """Current version of the server. 473 | 474 | :return: ``bytes``, memcached version for current the server. 
475 | """ 476 | 477 | command = b'version\r\n' 478 | response = await self._execute_simple_command(conn, command) 479 | if not response.startswith(const.VERSION): 480 | raise ClientException('Memcached version failed', response) 481 | version, number = response.rstrip(b"\r\n").split(maxsplit=1) 482 | return number 483 | 484 | @acquire 485 | async def flush_all(self, conn: Connection) -> None: 486 | """Its effect is to invalidate all existing items immediately""" 487 | command = b'flush_all\r\n' 488 | response = await self._execute_simple_command(conn, command) 489 | 490 | if const.OK != response: 491 | raise ClientException('Memcached flush_all failed', response) 492 | 493 | 494 | class Client(FlagClient[bytes]): 495 | def __init__(self, host: str, port: int = 11211, *, 496 | pool_size: int = 2, pool_minsize: Optional[int] = None, 497 | conn_args: Optional[Mapping[str, Any]] = None): 498 | super().__init__(host, port, pool_size=pool_size, pool_minsize=pool_minsize, 499 | conn_args=conn_args, 500 | get_flag_handler=None, set_flag_handler=None) 501 | -------------------------------------------------------------------------------- /aiomcache/constants.py: -------------------------------------------------------------------------------- 1 | STORED = b'STORED' 2 | NOT_STORED = b'NOT_STORED' 3 | TOUCHED = b'TOUCHED' 4 | NOT_FOUND = b'NOT_FOUND' 5 | DELETED = b'DELETED' 6 | VERSION = b'VERSION' 7 | EXISTS = b'EXISTS' 8 | OK = b'OK' 9 | -------------------------------------------------------------------------------- /aiomcache/exceptions.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | __all__ = ['ClientException', 'ValidationException'] 4 | 5 | 6 | class ClientException(Exception): 7 | """Raised when the server does something we don't expect.""" 8 | 9 | def __init__(self, msg: str, item: Optional[object] = None): 10 | if item is not None: 11 | msg = '%s: %r' % (msg, item) 12 | super().__init__(msg) 13 | 14 | 15 | class ValidationException(ClientException): 16 | """Raised when an invalid parameter is passed to a ``Client`` function.""" 17 | -------------------------------------------------------------------------------- /aiomcache/pool.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import Any, Mapping, NamedTuple, Optional, Set 3 | 4 | __all__ = ['MemcachePool'] 5 | 6 | 7 | class Connection(NamedTuple): 8 | reader: asyncio.StreamReader 9 | writer: asyncio.StreamWriter 10 | 11 | 12 | class MemcachePool: 13 | def __init__(self, host: str, port: int, *, minsize: int, maxsize: int, 14 | conn_args: Optional[Mapping[str, Any]] = None): 15 | self._host = host 16 | self._port = port 17 | self._minsize = minsize 18 | self._maxsize = maxsize 19 | self.conn_args = conn_args or {} 20 | self._pool: asyncio.Queue[Connection] = asyncio.Queue() 21 | self._in_use: Set[Connection] = set() 22 | 23 | async def clear(self) -> None: 24 | """Clear pool connections.""" 25 | while not self._pool.empty(): 26 | conn = await self._pool.get() 27 | self._do_close(conn) 28 | 29 | def _do_close(self, conn: Connection) -> None: 30 | conn.reader.feed_eof() 31 | conn.writer.close() 32 | 33 | async def acquire(self) -> Connection: 34 | """Acquire connection from the pool, or spawn new one 35 | if pool maxsize permits. 
36 | 37 | :return: ``tuple`` (reader, writer) 38 | """ 39 | while self.size() == 0 or self.size() < self._minsize: 40 | _conn = await self._create_new_conn() 41 | if _conn is None: 42 | break 43 | self._pool.put_nowait(_conn) 44 | 45 | conn: Optional[Connection] = None 46 | while not conn: 47 | _conn = await self._pool.get() 48 | if _conn.reader.at_eof() or _conn.reader.exception() is not None: 49 | self._do_close(_conn) 50 | conn = await self._create_new_conn() 51 | else: 52 | conn = _conn 53 | 54 | self._in_use.add(conn) 55 | return conn 56 | 57 | def release(self, conn: Connection) -> None: 58 | """Releases connection back to the pool. 59 | 60 | :param conn: ``namedtuple`` (reader, writer) 61 | """ 62 | self._in_use.remove(conn) 63 | if conn.reader.at_eof() or conn.reader.exception() is not None: 64 | self._do_close(conn) 65 | else: 66 | self._pool.put_nowait(conn) 67 | 68 | async def _create_new_conn(self) -> Optional[Connection]: 69 | if self.size() < self._maxsize: 70 | reader, writer = await asyncio.open_connection( 71 | self._host, self._port, **self.conn_args) 72 | if self.size() < self._maxsize: 73 | return Connection(reader, writer) 74 | else: 75 | reader.feed_eof() 76 | writer.close() 77 | return None 78 | else: 79 | return None 80 | 81 | def size(self) -> int: 82 | return self._pool.qsize() + len(self._in_use) 83 | -------------------------------------------------------------------------------- /aiomcache/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiomcache/eed26f21c7080dfc29af7053dee20e26fc0d9ea9/aiomcache/py.typed -------------------------------------------------------------------------------- /examples/simple.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import aiomcache 4 | 5 | 6 | async def hello_aiomcache() -> None: 7 | mc = aiomcache.Client("127.0.0.1", 11211) 8 | await mc.set(b"some_key", b"Some value") 9 | value = await mc.get(b"some_key") 10 | print(value) 11 | values = await mc.multi_get(b"some_key", b"other_key") 12 | print(values) 13 | await mc.delete(b"another_key") 14 | 15 | 16 | asyncio.run(hello_aiomcache()) 17 | -------------------------------------------------------------------------------- /examples/simple_with_flag_handler.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import datetime 3 | import pickle # noqa: S403 4 | from enum import IntEnum 5 | from typing import Any, Tuple 6 | 7 | import aiomcache 8 | 9 | 10 | class SimpleFlags(IntEnum): 11 | DEMO_FLAG_PICKLE = 1 12 | 13 | 14 | async def simple_get_flag_handler(value: bytes, flags: int) -> Any: 15 | print("get flag handler invoked") 16 | 17 | if flags == SimpleFlags.DEMO_FLAG_PICKLE: 18 | return pickle.loads(value) # noqa: S301 19 | 20 | raise ValueError(f"unrecognized flag: {flags}") 21 | 22 | 23 | async def simple_set_flag_handler(value: Any) -> Tuple[bytes, int]: 24 | print("set flag handler invoked") 25 | 26 | return pickle.dumps(value), SimpleFlags.DEMO_FLAG_PICKLE.value 27 | 28 | 29 | async def hello_aiomcache_with_flag_handlers() -> None: 30 | mc = aiomcache.FlagClient("127.0.0.1", 11211, 31 | get_flag_handler=simple_get_flag_handler, 32 | set_flag_handler=simple_set_flag_handler) 33 | 34 | await mc.set(b"some_first_key", b"Some value") 35 | value = await mc.get(b"some_first_key") 36 | 37 | print(f"retrieved value {repr(value)} without flag handler") 38 | 39 | date_value = 
datetime.date(2015, 12, 28) 40 | 41 | # flag handlers only triggered for non-byte values 42 | await mc.set(b"some_key_with_flag_handlers", date_value) 43 | value = await mc.get(b"some_key_with_flag_handlers") 44 | 45 | print(f'retrieved value with flag handler: {repr(value)}') 46 | 47 | 48 | asyncio.run(hello_aiomcache_with_flag_handlers()) 49 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = 3 | # show 10 slowest invocations: 4 | --durations=10 5 | 6 | # a bit of verbosity doesn't hurt: 7 | -v 8 | 9 | # report all the things == -rxXs: 10 | -ra 11 | 12 | # show values of the local vars in errors: 13 | --showlocals 14 | # coverage reports 15 | --cov=aiomcache/ --cov=tests/ --cov-report term 16 | asyncio_mode = auto 17 | filterwarnings = 18 | error 19 | testpaths = tests/ 20 | xfail_strict = true 21 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | 3 | mypy==1.16.0 4 | flake8==7.2.0 5 | flake8-bandit==4.1.1 6 | flake8-bugbear==24.12.12 7 | flake8-import-order==0.18.2 8 | flake8-requirements==2.2.1 9 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | docker-py==1.10.6 2 | pytest==8.4.0 3 | pytest-asyncio==0.26.0 4 | pytest-cov==6.1.1 5 | python-memcached==1.62 6 | typing_extensions==4.13.2; python_version<"3.11" 7 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [easy_install] 2 | zip_ok = false 3 | 4 | [nosetests] 5 | nocapture = 1 6 | cover-package = aiomcache 7 | cover-erase = 1 8 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import codecs 2 | import os 3 | import re 4 | 5 | from setuptools import setup 6 | 7 | 8 | with codecs.open(os.path.join(os.path.abspath(os.path.dirname( 9 | __file__)), 'aiomcache', '__init__.py'), 'r', 'latin1') as fp: 10 | try: 11 | version = re.findall(r'^__version__ = "([^"]+)"\r?$', fp.read(), re.M)[0] 12 | except IndexError: 13 | raise RuntimeError('Unable to determine version.') 14 | 15 | 16 | def read(f): 17 | return open(os.path.join(os.path.dirname(__file__), f)).read().strip() 18 | 19 | 20 | setup(name='aiomcache', 21 | version=version, 22 | description=('Minimal pure python memcached client'), 23 | long_description='\n\n'.join((read('README.rst'), read('CHANGES.rst'))), 24 | long_description_content_type='text/x-rst', 25 | classifiers=[ 26 | 'License :: OSI Approved :: BSD License', 27 | 'Intended Audience :: Developers', 28 | 'Programming Language :: Python', 29 | 'Programming Language :: Python :: 3.9', 30 | 'Programming Language :: Python :: 3.10', 31 | 'Programming Language :: Python :: 3.11', 32 | 'Programming Language :: Python :: 3.12', 33 | "Programming Language :: Python :: 3.13", 34 | 'Operating System :: POSIX', 35 | 'Operating System :: MacOS :: MacOS X', 36 | 'Operating System :: Microsoft :: Windows', 37 | 'Environment :: Web Environment', 38 | 'Framework :: AsyncIO', 39 | ], 40 | author='Nikolay Kim', 41 | 
author_email='fafhrd91@gmail.com', 42 | maintainer=', '.join(('Nikolay Kim ', 43 | 'Andrew Svetlov ')), 44 | maintainer_email='aio-libs@googlegroups.com', 45 | url='https://github.com/aio-libs/aiomcache/', 46 | license='BSD', 47 | packages=("aiomcache",), 48 | python_requires=">=3.9", 49 | install_requires=('typing_extensions>=4; python_version<"3.11"',), 50 | tests_require=("nose",), 51 | test_suite='nose.collector', 52 | include_package_data=True) 53 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiomcache/eed26f21c7080dfc29af7053dee20e26fc0d9ea9/tests/__init__.py -------------------------------------------------------------------------------- /tests/commands_test.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import datetime 3 | from typing import Any 4 | from unittest import mock 5 | from unittest.mock import MagicMock 6 | 7 | import pytest 8 | 9 | from aiomcache import Client, FlagClient 10 | from aiomcache.exceptions import ClientException, ValidationException 11 | from .flag_helper import FlagHelperDemo 12 | 13 | 14 | @pytest.mark.parametrize("key", ( 15 | b"key", 16 | b"123", 17 | bytes("!@#", "utf-8"), 18 | bytes("中文", "utf-8"), 19 | bytes("こんにちは", "utf-8"), 20 | bytes("안녕하세요", "utf-8"), 21 | )) 22 | async def test_valid_key(mcache: Client, key: bytes) -> None: 23 | assert mcache._validate_key(key) == key 24 | 25 | 26 | @pytest.mark.parametrize("key", ( 27 | # Whitespace 28 | b"foo bar", 29 | b"foo\t", 30 | b"\nbar", 31 | b"foo\x20\x0Dbar", 32 | b"\x18\x0E", 33 | b"\x20\x60", 34 | b"\x30\x00", 35 | b"\x20\x01", 36 | # Control characters 37 | b"foo\x00bar", 38 | b"\x1F", 39 | b"\x7F", 40 | "\u0080".encode(), 41 | "\u009F".encode(), 42 | )) 43 | async def test_invalid_key(mcache: Client, key: bytes) -> None: 44 | with pytest.raises(ValidationException, match="invalid key"): 45 | mcache._validate_key(key) 46 | 47 | 48 | async def test_version(mcache: Client) -> None: 49 | version = await mcache.version() 50 | stats = await mcache.stats() 51 | assert version == stats[b'version'] 52 | 53 | with mock.patch.object( 54 | mcache, 55 | "_execute_simple_command", 56 | new_callable=MagicMock) as patched: 57 | fut: asyncio.Future[bytes] = asyncio.Future() 58 | fut.set_result(b'SERVER_ERROR error\r\n') 59 | patched.return_value = fut 60 | with pytest.raises(ClientException): 61 | await mcache.version() 62 | 63 | 64 | async def test_flush_all(mcache: Client) -> None: 65 | key, value = b'key:flush_all', b'flush_all_value' 66 | await mcache.set(key, value) 67 | # make sure value exists 68 | test_value = await mcache.get(key) 69 | assert test_value == value 70 | # flush data 71 | await mcache.flush_all() 72 | # make sure value does not exists 73 | test_value = await mcache.get(key) 74 | assert test_value is None 75 | 76 | with mock.patch.object(mcache, '_execute_simple_command') as patched: 77 | fut: asyncio.Future[bytes] = asyncio.Future() 78 | fut.set_result(b'SERVER_ERROR error\r\n') 79 | patched.return_value = fut 80 | with pytest.raises(ClientException): 81 | await mcache.flush_all() 82 | 83 | 84 | async def test_set_get(mcache: Client) -> None: 85 | key, value = b'key:set', b'1' 86 | await mcache.set(key, value) 87 | test_value = await mcache.get(key) 88 | assert test_value == value 89 | test_value = await mcache.get(b"not:" + key) 90 | assert test_value 
is None 91 | test_value = await mcache.get(b"not:" + key, default=value) 92 | assert test_value == value 93 | 94 | with mock.patch.object(mcache, '_execute_simple_command') as patched: 95 | fut: asyncio.Future[bytes] = asyncio.Future() 96 | fut.set_result(b'SERVER_ERROR error\r\n') 97 | patched.return_value = fut 98 | with pytest.raises(ClientException): 99 | await mcache.set(key, value) 100 | 101 | 102 | async def test_gets(mcache: Client) -> None: 103 | key, value = b'key:set', b'1' 104 | await mcache.set(key, value) 105 | 106 | test_value, cas = await mcache.gets(key) 107 | assert test_value == value 108 | assert isinstance(cas, int) 109 | 110 | test_value, cas = await mcache.gets(b"not:" + key) 111 | assert test_value is None 112 | assert cas is None 113 | 114 | test_value, cas = await mcache.gets(b"not:" + key, default=value) 115 | assert test_value == value 116 | assert cas is None 117 | 118 | 119 | async def test_multi_get(mcache: Client) -> None: 120 | key1, value1 = b'key:multi_get:1', b'1' 121 | key2, value2 = b'key:multi_get:2', b'2' 122 | await mcache.set(key1, value1) 123 | await mcache.set(key2, value2) 124 | test_value = await mcache.multi_get(key1, key2) 125 | assert test_value == (value1, value2) 126 | 127 | test_value = await mcache.multi_get(b'not' + key1, key2) 128 | assert test_value == (None, value2) 129 | test_value = await mcache.multi_get() 130 | assert test_value == () 131 | 132 | 133 | async def test_multi_get_doubling_keys(mcache: Client) -> None: 134 | key, value = b'key:multi_get:3', b'1' 135 | await mcache.set(key, value) 136 | 137 | with pytest.raises(ClientException): 138 | await mcache.multi_get(key, key) 139 | 140 | 141 | async def test_set_expire(mcache: Client) -> None: 142 | key, value = b'key:set', b'1' 143 | await mcache.set(key, value, exptime=1) 144 | test_value = await mcache.get(key) 145 | assert test_value == value 146 | 147 | await asyncio.sleep(1) 148 | 149 | test_value = await mcache.get(key) 150 | assert test_value is None 151 | 152 | 153 | async def test_set_errors(mcache: Client) -> None: 154 | key, value = b'key:set', b'1' 155 | await mcache.set(key, value, exptime=1) 156 | 157 | with pytest.raises(ValidationException): 158 | await mcache.set(key, value, exptime=-1) 159 | 160 | with pytest.raises(ValidationException): 161 | await mcache.set(key, value, exptime=3.14) # type: ignore[arg-type] 162 | 163 | 164 | async def test_gets_cas(mcache: Client) -> None: 165 | key, value = b'key:set', b'1' 166 | await mcache.set(key, value) 167 | 168 | test_value, cas = await mcache.gets(key) 169 | 170 | assert cas is not None 171 | 172 | stored = await mcache.cas(key, value, cas) 173 | assert stored is True 174 | 175 | stored = await mcache.cas(key, value, cas) 176 | assert stored is False 177 | 178 | 179 | async def test_cas_missing(mcache: Client) -> None: 180 | key, value = b'key:set', b'1' 181 | stored = await mcache.cas(key, value, 123) 182 | assert stored is False 183 | 184 | 185 | async def test_add(mcache: Client) -> None: 186 | key, value = b'key:add', b'1' 187 | await mcache.set(key, value) 188 | 189 | test_value1 = await mcache.add(key, b"2") 190 | assert not test_value1 191 | 192 | test_value2 = await mcache.add(b"not:" + key, b"2") 193 | assert test_value2 194 | 195 | test_value3 = await mcache.get(b"not:" + key) 196 | assert test_value3 == b"2" 197 | 198 | 199 | async def test_replace(mcache: Client) -> None: 200 | key, value = b'key:replace', b'1' 201 | await mcache.set(key, value) 202 | 203 | test_value1 = await mcache.replace(key, 
b"2") 204 | assert test_value1 205 | # make sure value exists 206 | test_value2 = await mcache.get(key) 207 | assert test_value2 == b"2" 208 | 209 | test_value3 = await mcache.replace(b"not:" + key, b"3") 210 | assert not test_value3 211 | # make sure value exists 212 | test_value4 = await mcache.get(b"not:" + key) 213 | assert test_value4 is None 214 | 215 | 216 | async def test_append(mcache: Client) -> None: 217 | key, value = b'key:append', b'1' 218 | await mcache.set(key, value) 219 | 220 | test_value1 = await mcache.append(key, b"2") 221 | assert test_value1 222 | 223 | # make sure value exists 224 | test_value2 = await mcache.get(key) 225 | assert test_value2 == b"12" 226 | 227 | test_value3 = await mcache.append(b"not:" + key, b"3") 228 | assert not test_value3 229 | # make sure value exists 230 | test_value4 = await mcache.get(b"not:" + key) 231 | assert test_value4 is None 232 | 233 | 234 | async def test_prepend(mcache: Client) -> None: 235 | key, value = b'key:prepend', b'1' 236 | await mcache.set(key, value) 237 | 238 | test_value1 = await mcache.prepend(key, b"2") 239 | assert test_value1 240 | 241 | # make sure value exists 242 | test_value2 = await mcache.get(key) 243 | assert test_value2 == b"21" 244 | 245 | test_value3 = await mcache.prepend(b"not:" + key, b"3") 246 | assert not test_value3 247 | # make sure value exists 248 | test_value4 = await mcache.get(b"not:" + key) 249 | assert test_value4 is None 250 | 251 | 252 | async def test_delete(mcache: Client) -> None: 253 | key, value = b'key:delete', b'value' 254 | await mcache.set(key, value) 255 | 256 | # make sure value exists 257 | test_value = await mcache.get(key) 258 | assert test_value == value 259 | 260 | is_deleted = await mcache.delete(key) 261 | assert is_deleted 262 | # make sure value does not exists 263 | test_value = await mcache.get(key) 264 | assert test_value is None 265 | 266 | with mock.patch.object(mcache, '_execute_simple_command') as patched: 267 | fut: asyncio.Future[bytes] = asyncio.Future() 268 | fut.set_result(b'SERVER_ERROR error\r\n') 269 | patched.return_value = fut 270 | 271 | with pytest.raises(ClientException): 272 | await mcache.delete(key) 273 | 274 | 275 | async def test_delete_key_not_exists(mcache: Client) -> None: 276 | is_deleted = await mcache.delete(b"not:key") 277 | assert not is_deleted 278 | 279 | 280 | async def test_incr(mcache: Client) -> None: 281 | key, value = b'key:incr:1', b'1' 282 | await mcache.set(key, value) 283 | 284 | test_value1 = await mcache.incr(key, 2) 285 | assert test_value1 == 3 286 | 287 | # make sure value exists 288 | test_value2 = await mcache.get(key) 289 | assert test_value2 == b"3" 290 | 291 | 292 | async def test_incr_errors(mcache: Client) -> None: 293 | key, value = b'key:incr:2', b'string' 294 | await mcache.set(key, value) 295 | 296 | with pytest.raises(ClientException): 297 | await mcache.incr(key, 2) 298 | 299 | with pytest.raises(ClientException): 300 | await mcache.incr(key, 3.14) # type: ignore[arg-type] 301 | 302 | 303 | async def test_decr(mcache: Client) -> None: 304 | key, value = b'key:decr:1', b'17' 305 | await mcache.set(key, value) 306 | 307 | test_value1 = await mcache.decr(key, 2) 308 | assert test_value1 == 15 309 | 310 | test_value2 = await mcache.get(key) 311 | assert test_value2 == b"15" 312 | 313 | test_value3 = await mcache.decr(key, 1000) 314 | assert test_value3 == 0 315 | 316 | 317 | async def test_decr_errors(mcache: Client) -> None: 318 | key, value = b'key:decr:2', b'string' 319 | await mcache.set(key, value) 320 | 
321 | with pytest.raises(ClientException): 322 | await mcache.decr(key, 2) 323 | 324 | with pytest.raises(ClientException): 325 | await mcache.decr(key, 3.14) # type: ignore[arg-type] 326 | 327 | 328 | async def test_stats(mcache: Client) -> None: 329 | stats = await mcache.stats() 330 | assert b'pid' in stats 331 | 332 | 333 | async def test_touch(mcache: Client) -> None: 334 | key, value = b'key:touch:1', b'17' 335 | await mcache.set(key, value) 336 | 337 | test_value1 = await mcache.touch(key, 1) 338 | assert test_value1 339 | 340 | test_value2 = await mcache.get(key) 341 | assert test_value2 == value 342 | 343 | await asyncio.sleep(1) 344 | 345 | test_value3 = await mcache.get(key) 346 | assert test_value3 is None 347 | 348 | test_value4 = await mcache.touch(b"not:" + key, 1) 349 | assert not test_value4 350 | 351 | with mock.patch.object(mcache, '_execute_simple_command') as patched: 352 | fut: asyncio.Future[bytes] = asyncio.Future() 353 | fut.set_result(b'SERVER_ERROR error\r\n') 354 | patched.return_value = fut 355 | 356 | with pytest.raises(ClientException): 357 | await mcache.touch(b"not:" + key, 1) 358 | 359 | 360 | async def test_close(mcache: Client) -> None: 361 | await mcache.close() 362 | assert mcache._pool.size() == 0 363 | 364 | 365 | @pytest.mark.parametrize( 366 | "value", 367 | [ 368 | "key", 369 | b"bkey", 370 | False, 371 | 1, 372 | None, 373 | 0.5, 374 | [1, 2, 3], 375 | tuple([1, 2, 3]), 376 | [datetime.date(2015, 12, 28)], 377 | bytes("!@#", "utf-8"), 378 | bytes("안녕하세요", "utf-8"), 379 | ] 380 | ) 381 | async def test_flag_helper( 382 | mcache_flag_client: FlagClient[Any], value: object) -> None: 383 | key = b"key:test_flag_helper" 384 | 385 | await mcache_flag_client.set(key, value) 386 | v2 = await mcache_flag_client.get(key) 387 | assert v2 == value 388 | 389 | 390 | async def test_objects_not_supported_without_flag_handler(mcache: Client) -> None: 391 | key = b"key:test_objects_not_supported_without_flag_handler" 392 | 393 | date_value = datetime.date(2015, 12, 28) 394 | 395 | with pytest.raises(ValidationException): 396 | await mcache.set(key, date_value) # type: ignore[arg-type] 397 | 398 | result = await mcache.get(key) 399 | assert result is None 400 | 401 | 402 | async def test_flag_handler_invoked_only_when_expected( 403 | mcache_flag_client: FlagClient[Any], demo_flag_helper: FlagHelperDemo) -> None: 404 | key = b"key:test_flag_handler_invoked_only_when_expected" 405 | 406 | orig_get_count = demo_flag_helper.get_invocation_count 407 | orig_set_count = demo_flag_helper.set_invocation_count 408 | 409 | # should be invoked on non-byte values 410 | 411 | date_value = datetime.date(2015, 12, 28) 412 | 413 | await mcache_flag_client.set(key, date_value) 414 | v2 = await mcache_flag_client.get(key) 415 | assert v2 == date_value 416 | 417 | assert orig_get_count + 1 == demo_flag_helper.get_invocation_count 418 | assert orig_set_count + 1 == demo_flag_helper.set_invocation_count 419 | 420 | # should not be invoked on byte values 421 | 422 | byte_value = bytes("안녕하세요", "utf-8") 423 | 424 | await mcache_flag_client.set(key, byte_value) 425 | v3 = await mcache_flag_client.get(key) 426 | assert v3 == byte_value 427 | 428 | assert orig_get_count + 1 == demo_flag_helper.get_invocation_count 429 | assert orig_set_count + 1 == demo_flag_helper.set_invocation_count 430 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import 
contextlib 2 | import socket 3 | import sys 4 | import time 5 | import uuid 6 | from typing import Any, AsyncIterator, Callable, Iterator, TypedDict 7 | 8 | import docker as docker_mod 9 | import memcache 10 | import pytest 11 | 12 | import aiomcache 13 | from .flag_helper import FlagHelperDemo 14 | 15 | if sys.version_info < (3, 11): 16 | from typing_extensions import NotRequired 17 | else: 18 | from typing import NotRequired 19 | 20 | 21 | class McacheParams(TypedDict): 22 | host: str 23 | port: int 24 | 25 | 26 | class ServerParams(TypedDict): 27 | Id: NotRequired[str] 28 | host: str 29 | port: int 30 | mcache_params: McacheParams 31 | 32 | 33 | mcache_server_option = "localhost" 34 | 35 | 36 | def pytest_addoption(parser: pytest.Parser) -> None: 37 | parser.addoption( 38 | '--memcached', help='Memcached server') 39 | 40 | 41 | @pytest.fixture(scope='session') 42 | def unused_port() -> Callable[[], int]: 43 | def f() -> int: 44 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: 45 | s.bind(('127.0.0.1', 0)) 46 | return s.getsockname()[1] # type: ignore[no-any-return] 47 | return f 48 | 49 | 50 | def pytest_runtest_setup(item: pytest.Item) -> None: 51 | global mcache_server_option 52 | mcache_server_option = item.config.getoption("--memcached", "localhost") 53 | 54 | 55 | @pytest.fixture(scope='session') 56 | def session_id() -> str: 57 | '''Unique session identifier, random string.''' 58 | return str(uuid.uuid4()) 59 | 60 | 61 | @pytest.fixture(scope='session') 62 | def docker() -> docker_mod.Client: # type: ignore[no-any-unimported] 63 | return docker_mod.from_env() 64 | 65 | 66 | def mcache_server_actual(host: str, port: int = 11211) -> ServerParams: 67 | port = int(port) 68 | return { 69 | "host": host, 70 | "port": port, 71 | "mcache_params": {"host": host, "port": port} 72 | } 73 | 74 | 75 | @contextlib.contextmanager 76 | def mcache_server_docker( # type: ignore[no-any-unimported] 77 | unused_port: Callable[[], int], docker: docker_mod.Client, session_id: str 78 | ) -> Iterator[ServerParams]: 79 | docker.images.pull("memcached:alpine") 80 | container = docker.containers.run( 81 | image='memcached:alpine', 82 | name='memcached-test-server-{}'.format(session_id), 83 | ports={"11211/tcp": None}, 84 | detach=True, 85 | ) 86 | try: 87 | container.start() 88 | container.reload() 89 | net_settings = container.attrs["NetworkSettings"] 90 | host = net_settings["IPAddress"] 91 | port = int(net_settings["Ports"]["11211/tcp"][0]["HostPort"]) 92 | mcache_params: McacheParams = {"host": host, "port": port} 93 | delay = 0.001 94 | for _i in range(10): 95 | try: 96 | conn = memcache.Client(["{host}:{port}".format_map(mcache_params)]) 97 | conn.get_stats() 98 | break 99 | except Exception: 100 | time.sleep(delay) 101 | delay *= 2 102 | else: 103 | pytest.fail("Cannot start memcached") 104 | ret: ServerParams = { 105 | "Id": container.id, 106 | "host": host, 107 | "port": port, 108 | "mcache_params": mcache_params 109 | } 110 | time.sleep(0.1) 111 | yield ret 112 | finally: 113 | container.kill() 114 | container.remove() 115 | 116 | 117 | @pytest.fixture(scope='session') 118 | def mcache_server() -> ServerParams: 119 | return mcache_server_actual("localhost") 120 | 121 | 122 | @pytest.fixture 123 | def mcache_params(mcache_server: ServerParams) -> McacheParams: 124 | return mcache_server["mcache_params"] 125 | 126 | 127 | @pytest.fixture 128 | async def mcache(mcache_params: McacheParams) -> AsyncIterator[aiomcache.Client]: 129 | client = aiomcache.Client(**mcache_params) 130 | yield 
client 131 | await client.close() 132 | 133 | 134 | test_only_demo_flag_helper = FlagHelperDemo() 135 | 136 | 137 | @pytest.fixture 138 | async def demo_flag_helper() -> FlagHelperDemo: 139 | return test_only_demo_flag_helper 140 | 141 | 142 | @pytest.fixture 143 | async def mcache_flag_client( 144 | mcache_params: McacheParams, demo_flag_helper: FlagHelperDemo 145 | ) -> AsyncIterator[aiomcache.FlagClient[Any]]: 146 | 147 | client = aiomcache.FlagClient( 148 | get_flag_handler=demo_flag_helper.demo_get_flag_handler, 149 | set_flag_handler=demo_flag_helper.demo_set_flag_handler, 150 | **mcache_params) 151 | try: 152 | yield client 153 | finally: 154 | await client.close() 155 | -------------------------------------------------------------------------------- /tests/conn_args_test.py: -------------------------------------------------------------------------------- 1 | import ssl 2 | from asyncio import StreamReader, StreamWriter 3 | from unittest import mock 4 | 5 | import pytest 6 | 7 | from aiomcache import Client 8 | from .conftest import McacheParams 9 | 10 | 11 | async def test_params_forwarded_from_client() -> None: 12 | client = Client("host", port=11211, conn_args={ 13 | "ssl": True, "ssl_handshake_timeout": 20 14 | }) 15 | 16 | with mock.patch( 17 | "asyncio.open_connection", 18 | return_value=( 19 | mock.create_autospec(StreamReader), 20 | mock.create_autospec(StreamWriter), 21 | ), 22 | autospec=True, 23 | ) as oc: 24 | await client._pool.acquire() 25 | 26 | oc.assert_called_with("host", 11211, ssl=True, ssl_handshake_timeout=20) 27 | 28 | 29 | async def test_ssl_client_fails_against_plaintext_server( 30 | mcache_params: McacheParams, 31 | ) -> None: 32 | client = Client(**mcache_params, conn_args={"ssl": True}) 33 | # If SSL was correctly enabled, this should 34 | # fail, since SSL isn't enabled on the memcache 35 | # server. 
36 | with pytest.raises(ssl.SSLError): 37 | await client.get(b"key") 38 | -------------------------------------------------------------------------------- /tests/flag_helper.py: -------------------------------------------------------------------------------- 1 | import pickle # noqa: S403 2 | from enum import IntEnum 3 | from typing import Any, Tuple 4 | 5 | 6 | # See also: 7 | # https://github.com/lericson/pylibmc/blob/master/src/_pylibmcmodule.h#L63 8 | class DemoFlags(IntEnum): 9 | DEMO_FLAG_PICKLE = 1 10 | 11 | 12 | # demo/ref flag handler, for more elaborate potential handlers, see: 13 | # https://github.com/lericson/pylibmc/blob/master/src/_pylibmcmodule.c#L640 14 | class FlagHelperDemo: 15 | 16 | get_invocation_count = 0 17 | set_invocation_count = 0 18 | 19 | async def demo_get_flag_handler(self, value: bytes, flags: int) -> Any: 20 | self.get_invocation_count += 1 21 | 22 | if flags == DemoFlags.DEMO_FLAG_PICKLE: 23 | return pickle.loads(value) # noqa: S301 24 | 25 | raise ValueError(f"unrecognized flag: {flags}") 26 | 27 | # demo/ref flag handler, for more elaborate potential handlers, see: 28 | # https://github.com/lericson/pylibmc/blob/master/src/_pylibmcmodule.c#L1241 29 | async def demo_set_flag_handler(self, value: Any) -> Tuple[bytes, int]: 30 | self.set_invocation_count += 1 31 | 32 | # in this example we exclusively use pickle; a more elaborate handler 33 | # could use additional/alternate flags 34 | return pickle.dumps(value), DemoFlags.DEMO_FLAG_PICKLE.value 35 | -------------------------------------------------------------------------------- /tests/pool_test.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import random 3 | import socket 4 | 5 | import pytest 6 | 7 | from aiomcache.client import Client, acquire 8 | from aiomcache.pool import Connection, MemcachePool 9 | from .conftest import McacheParams 10 | 11 | 12 | async def test_pool_creation(mcache_params: McacheParams) -> None: 13 | pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) 14 | assert pool.size() == 0 15 | assert pool._minsize == 1 16 | 17 | 18 | async def test_pool_acquire_release(mcache_params: McacheParams) -> None: 19 | pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) 20 | conn = await pool.acquire() 21 | assert isinstance(conn.reader, asyncio.StreamReader) 22 | assert isinstance(conn.writer, asyncio.StreamWriter) 23 | pool.release(conn) 24 | await pool.clear() 25 | 26 | 27 | async def test_pool_acquire_release2(mcache_params: McacheParams) -> None: 28 | pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) 29 | reader, writer = await asyncio.open_connection( 30 | mcache_params["host"], mcache_params["port"]) 31 | # put a dead connection into the pool 32 | writer.close() 33 | reader.feed_eof() 34 | conn = Connection(reader, writer) 35 | await pool._pool.put(conn) 36 | conn = await pool.acquire() 37 | assert isinstance(conn.reader, asyncio.StreamReader) 38 | assert isinstance(conn.writer, asyncio.StreamWriter) 39 | pool.release(conn) 40 | await pool.clear() 41 | 42 | 43 | async def test_pool_clear(mcache_params: McacheParams) -> None: 44 | pool = MemcachePool(minsize=1, maxsize=5, **mcache_params) 45 | conn = await pool.acquire() 46 | pool.release(conn) 47 | assert pool.size() == 1 48 | await pool.clear() 49 | assert pool._pool.qsize() == 0 50 | 51 | 52 | async def test_acquire_dont_create_new_connection_if_have_conn_in_pool( 53 | mcache_params: McacheParams, 54 | ) -> None: 55 | pool = MemcachePool(minsize=1, maxsize=5, 
**mcache_params) 56 | assert pool.size() == 0 57 | 58 | # Add a valid connection 59 | _conn = await pool._create_new_conn() 60 | assert _conn is not None 61 | await pool._pool.put(_conn) 62 | assert pool.size() == 1 63 | 64 | conn = await pool.acquire() 65 | assert conn is _conn 66 | assert pool.size() == 1 67 | pool.release(conn) 68 | await pool.clear() 69 | 70 | 71 | async def test_acquire_limit_maxsize(mcache_params: McacheParams) -> None: 72 | pool = MemcachePool(minsize=1, maxsize=1, **mcache_params) 73 | assert pool.size() == 0 74 | 75 | # Create up to max connections 76 | _conn = await pool.acquire() 77 | assert pool.size() == 1 78 | pool.release(_conn) 79 | 80 | async def acquire_wait_release() -> None: 81 | conn = await pool.acquire() 82 | assert conn is _conn 83 | await asyncio.sleep(0.01) 84 | assert len(pool._in_use) == 1 85 | assert pool.size() == 1 86 | assert pool._pool.qsize() == 0 87 | pool.release(conn) 88 | 89 | await asyncio.gather(*([acquire_wait_release()] * 50)) 90 | assert pool.size() == 1 91 | assert len(pool._in_use) == 0 92 | assert pool._pool.qsize() == 1 93 | await pool.clear() 94 | 95 | 96 | async def test_acquire_task_cancellation(mcache_params: McacheParams) -> None: 97 | 98 | class TestClient(Client): 99 | def __init__(self, pool_size: int = 4): 100 | self._pool = MemcachePool( 101 | minsize=pool_size, maxsize=pool_size, 102 | **mcache_params) 103 | 104 | @acquire 105 | async def acquire_wait_release(self, conn: Connection) -> str: 106 | assert self._pool.size() <= pool_size 107 | await asyncio.sleep(random.uniform(0.01, 0.02)) # noqa: S311 108 | return "foo" 109 | 110 | pool_size = 4 111 | client = TestClient(pool_size=pool_size) 112 | tasks = [ 113 | asyncio.wait_for( 114 | client.acquire_wait_release(), 115 | random.uniform(1, 2)) for x in range(1000) # noqa: S311 116 | ] 117 | results = await asyncio.gather(*tasks, return_exceptions=True) 118 | assert client._pool.size() <= pool_size 119 | assert len(client._pool._in_use) == 0 120 | assert "foo" in results 121 | await client._pool.clear() 122 | 123 | 124 | async def test_maxsize_greater_than_minsize(mcache_params: McacheParams) -> None: 125 | pool = MemcachePool(minsize=5, maxsize=1, **mcache_params) 126 | conn = await pool.acquire() 127 | assert isinstance(conn.reader, asyncio.StreamReader) 128 | assert isinstance(conn.writer, asyncio.StreamWriter) 129 | pool.release(conn) 130 | await pool.clear() 131 | 132 | 133 | async def test_0_minsize(mcache_params: McacheParams) -> None: 134 | pool = MemcachePool(minsize=0, maxsize=5, **mcache_params) 135 | conn = await pool.acquire() 136 | assert isinstance(conn.reader, asyncio.StreamReader) 137 | assert isinstance(conn.writer, asyncio.StreamWriter) 138 | pool.release(conn) 139 | await pool.clear() 140 | 141 | 142 | async def test_bad_connection(mcache_params: McacheParams) -> None: 143 | pool = MemcachePool(minsize=5, maxsize=1, **mcache_params) 144 | pool._host = "INVALID_HOST" 145 | assert pool.size() == 0 146 | with pytest.raises(socket.gaierror): 147 | conn = await pool.acquire() 148 | assert isinstance(conn.reader, asyncio.StreamReader) 149 | assert isinstance(conn.writer, asyncio.StreamWriter) 150 | pool.release(conn) 151 | assert pool.size() == 0 152 | --------------------------------------------------------------------------------
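
For reference, a minimal sketch of how the pickle-based flag handlers demonstrated in tests/flag_helper.py might be wired into aiomcache.FlagClient in application code. This is not a file from the repository: the handler names, the chosen flag value, and the server address are illustrative assumptions, while the FlagClient constructor keywords and the set/get/close calls follow the mcache_flag_client fixture and the flag-handler tests above.

import asyncio
import pickle
from typing import Any, Tuple

import aiomcache

PICKLE_FLAG = 1  # application-chosen flag value, mirroring DEMO_FLAG_PICKLE above


async def set_flag_handler(value: Any) -> Tuple[bytes, int]:
    # serialize any non-bytes value and tag it with our flag
    return pickle.dumps(value), PICKLE_FLAG


async def get_flag_handler(value: bytes, flags: int) -> Any:
    # only values stored with our flag are unpickled
    if flags == PICKLE_FLAG:
        return pickle.loads(value)
    raise ValueError(f"unrecognized flag: {flags}")


async def main() -> None:
    client = aiomcache.FlagClient(
        host="127.0.0.1", port=11211,
        get_flag_handler=get_flag_handler,
        set_flag_handler=set_flag_handler,
    )
    try:
        # non-bytes values go through the set/get handlers, as in the tests
        await client.set(b"greeting", {"lang": "ko", "text": "안녕하세요"})
        print(await client.get(b"greeting"))
    finally:
        await client.close()


asyncio.run(main())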
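
Similarly, tests/conn_args_test.py shows that conn_args passed to Client are forwarded verbatim to asyncio.open_connection, so TLS options can be supplied roughly as below. The hostname and the use of a default SSL context are assumptions for illustration; only ssl=True and ssl_handshake_timeout are exercised by the test above.

import ssl

import aiomcache

# conn_args is forwarded to asyncio.open_connection, so any of its keyword
# arguments (ssl, ssl_handshake_timeout, ...) can be supplied here
ssl_context = ssl.create_default_context()
client = aiomcache.Client(
    "memcached.example.com", port=11211,
    conn_args={"ssl": ssl_context, "ssl_handshake_timeout": 20},
)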