├── .github ├── CODEOWNERS ├── dependabot.yml └── workflows │ ├── ci.yml │ └── codeql.yml ├── .gitignore ├── ADOPTERS.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── ChangeLog.rst ├── LICENSE.txt ├── MANIFEST.in ├── README.rst ├── docs-requirements.txt ├── docs ├── Makefile ├── changelog.rst ├── conf.py ├── getting_started.rst ├── index.rst └── make.bat ├── lint-requirements.txt ├── mypy-requirements.txt ├── pymemcache ├── __init__.py ├── client │ ├── __init__.py │ ├── base.py │ ├── ext │ │ ├── __init__.py │ │ └── aws_ec_client.py │ ├── hash.py │ ├── murmur3.py │ ├── rendezvous.py │ └── retrying.py ├── exceptions.py ├── fallback.py ├── pool.py ├── py.typed ├── serde.py └── test │ ├── __init__.py │ ├── certs │ ├── ca-root.crt │ ├── client.crt │ ├── client.key │ └── update.sh │ ├── conftest.py │ ├── test_benchmark.py │ ├── test_client.py │ ├── test_client_hash.py │ ├── test_client_retry.py │ ├── test_compression.py │ ├── test_ext_aws_ec_client.py │ ├── test_integration.py │ ├── test_rendezvous.py │ ├── test_serde.py │ ├── test_utils.py │ └── utils.py ├── pyproject.toml ├── setup.cfg ├── setup.py ├── test-requirements.txt └── tox.ini /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Global 2 | * @pinterest/pymemcache @jparise 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | updates: 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "weekly" 8 | - package-ecosystem: "pip" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" 12 | ignore: 13 | - dependency-name: "*" 14 | update-types: ["version-update:semver-patch"] 15 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', 'pypy-3.10'] 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python ${{ matrix.python-version }} 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | cache: 'pip' 25 | - name: Install dependencies 26 | run: | 27 | python -m pip install tox tox-gh-actions 28 | sudo apt-get install libmemcached-dev 29 | - name: Lint 30 | if: matrix.python-version == '3.10' 31 | run: | 32 | tox -e lint,mypy 33 | - name: Docs 34 | if: matrix.python-version == '3.10' 35 | run: | 36 | tox -e docs 37 | - name: Disable IPv6 localhost 38 | run: | 39 | sudo sed -i '/::1/d' /etc/hosts 40 | - name: Tests 41 | run: | 42 | tox -- pymemcache/test/ \ 43 | -m 'unit or integration' \ 44 | --port ${{ job.services.memcached.ports[11211] }} \ 45 | --tls-port ${{ job.services.tls_memcached.ports[11211] }} 46 | 47 | services: 48 | memcached: 49 | image: memcached:latest 50 | ports: 51 | - 11211/tcp 52 | tls_memcached: 53 | image: scoriacorp/tls_memcached:latest 54 | ports: 55 | - 11211/tcp 56 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "CodeQL" 3 | 4 | on: 5 | push: 6 | branches: 7 | 
- master 8 | pull_request: 9 | branches: 10 | - master 11 | schedule: 12 | - cron: '45 8 * * 4' 13 | 14 | jobs: 15 | analyze: 16 | name: Analyze 17 | runs-on: ubuntu-latest 18 | permissions: 19 | actions: read 20 | contents: read 21 | security-events: write 22 | 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | language: 27 | - 'python' 28 | 29 | steps: 30 | - name: Checkout repository 31 | uses: actions/checkout@v4 32 | 33 | - name: Initialize CodeQL 34 | uses: github/codeql-action/init@v3 35 | with: 36 | languages: ${{ matrix.language }} 37 | 38 | - name: Autobuild 39 | uses: github/codeql-action/autobuild@v3 40 | 41 | - name: Perform CodeQL Analysis 42 | uses: github/codeql-action/analyze@v3 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # compiled code. 2 | *.py[co] 3 | 4 | # Packages 5 | *.egg 6 | *.egg-info 7 | # dist 8 | # build 9 | eggs 10 | .eggs/ 11 | # parts 12 | # bin 13 | # var 14 | # sdist 15 | develop-eggs 16 | .installed.cfg 17 | build 18 | dist 19 | .cache 20 | 21 | # Installer logs 22 | pip-log.txt 23 | env/ 24 | 25 | # Unit test / coverage reports 26 | .coverage 27 | .pytest_cache 28 | .tox 29 | 30 | #Translations 31 | *.mo 32 | 33 | # Mac FS 34 | .DS_Store 35 | 36 | #Mr Developer 37 | .mr.developer.cfg 38 | 39 | # Swap files. 40 | *.swp 41 | 42 | .pip 43 | .pypirc 44 | coverage.xml 45 | .python-version 46 | \#*\# 47 | 48 | #Docs 49 | docs/_build 50 | docs/apidoc/ 51 | -------------------------------------------------------------------------------- /ADOPTERS.md: -------------------------------------------------------------------------------- 1 | # Adopters 2 | 3 | This is an alphabetical list of people and organizations who are using this 4 | project. If you'd like to be included here, please send a Pull Request that 5 | adds your information to this file. 6 | 7 | - [Django](https://www.djangoproject.com/) 8 | - [Pinterest](https://www.pinterest.com/) 9 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | At Pinterest, we work hard to ensure that our work environment is welcoming 4 | and inclusive to as many people as possible. We are committed to creating this 5 | environment for everyone involved in our open source projects as well. We 6 | welcome all participants regardless of ability, age, ethnicity, identified 7 | gender, religion (or lack there of), sexual orientation and socioeconomic 8 | status. 9 | 10 | This code of conduct details our expectations for upholding these values. 11 | 12 | ## Good behavior 13 | 14 | We expect members of our community to exhibit good behavior including (but of 15 | course not limited to): 16 | 17 | - Using intentional and empathetic language. 18 | - Focusing on resolving instead of escalating conflict. 19 | - Providing constructive feedback. 20 | 21 | ## Unacceptable behavior 22 | 23 | Some examples of unacceptable behavior (again, this is not an exhaustive 24 | list): 25 | 26 | - Harassment, publicly or in private. 27 | - Trolling. 28 | - Sexual advances (this isn’t the place for it). 29 | - Publishing other’s personal information. 30 | - Any behavior which would be deemed unacceptable in a professional environment. 
31 | 32 | ## Recourse 33 | 34 | If you are witness to or the target of unacceptable behavior, it should be 35 | reported to Pinterest at opensource-policy@pinterest.com. All reporters will 36 | be kept confidential and an appropriate response for each incident will be 37 | evaluated. 38 | 39 | If the pymemcache maintainers do not uphold and enforce this code of conduct 40 | in good faith, community leadership will hold them accountable. 41 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | First off, thanks for taking the time to contribute! This guide will answer 4 | some common questions about how this project works. 5 | 6 | While this is a Pinterest open source project, we welcome contributions from 7 | everyone. Regular outside contributors can become project maintainers. 8 | 9 | ## Code of Conduct 10 | 11 | Please read and understand our [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md). We 12 | work hard to ensure that our projects are welcoming and inclusive to as many 13 | people as possible. 14 | 15 | ## Making Changes 16 | 17 | 1. Fork this repository to your own account 18 | 2. Make your changes and verify that tests pass 19 | 3. Commit your work and push to a new branch on your fork 20 | 4. Submit a pull request 21 | 5. Participate in the code review process by responding to feedback 22 | 23 | Once there is agreement that the code is in good shape, one of the project's 24 | maintainers will merge your contribution. 25 | 26 | To increase the chances that your pull request will be accepted: 27 | 28 | - Follow the coding style 29 | - Write tests for your changes 30 | - Write a good commit message 31 | 32 | ## Coding Style 33 | 34 | This project follows [PEP 8](https://www.python.org/dev/peps/pep-0008/) 35 | conventions and is linted using [flake8](http://flake8.pycqa.org/). 36 | 37 | ## Testing 38 | 39 | The tests use [pytest](https://docs.pytest.org/) and can be run using `tox` or 40 | directly via: 41 | 42 | py.test pymemcache/test/ 43 | 44 | Note that the tests require a local memcached instance. 45 | 46 | ## License 47 | 48 | By contributing to this project, you agree that your contributions will be 49 | licensed under its [Apache 2 license](LICENSE). 50 | -------------------------------------------------------------------------------- /ChangeLog.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | New in version 4.0.0 4 | -------------------- 5 | * Dropped Python 2 and 3.6 support 6 | `#321 `_ 7 | `#363 `_ 8 | * Begin adding typing 9 | * Add pluggable compression serde 10 | `#407 `_ 11 | 12 | 13 | New in version 3.5.2 14 | -------------------- 15 | * Handle blank ``STAT`` values. 16 | 17 | New in version 3.5.1 18 | -------------------- 19 | * ``Client.get`` returns the default when using ``ignore_exc`` and if memcached 20 | is unavailable 21 | * Added ``noreply`` support to ``HashClient.flush_all``. 22 | 23 | New in version 3.5.0 24 | -------------------- 25 | * Sockets are now closed on ``MemcacheUnexpectedCloseError``. 26 | * Added support for TCP keepalive for client sockets on Linux platforms. 27 | * Added retrying mechanisms by wrapping clients. 28 | 29 | New in version 3.4.4 30 | -------------------- 31 | * Idle connections will be removed from the pool after ``pool_idle_timeout``. 
32 | 33 | New in version 3.4.3 34 | -------------------- 35 | * Fix ``HashClient.{get,set}_many()`` with UNIX sockets. 36 | 37 | New in version 3.4.2 38 | -------------------- 39 | * Remove trailing space for commands that don't take arguments, such as 40 | ``stats``. This was a violation of the memcached protocol. 41 | 42 | New in version 3.4.1 43 | -------------------- 44 | * CAS operations will now raise ``MemcacheIllegalInputError`` when ``None`` is 45 | given as the ``cas`` value. 46 | 47 | New in version 3.4.0 48 | -------------------- 49 | * Added IPv6 support for TCP socket connections. Note that IPv6 may be used in 50 | preference to IPv4 when passing a domain name as the host if an IPv6 address 51 | can be resolved for that domain. 52 | * ``HashClient`` now supports UNIX sockets. 53 | 54 | New in version 3.3.0 55 | -------------------- 56 | * ``HashClient`` can now be imported from the top-level ``pymemcache`` package 57 | (e.g. ``pymemcache.HashClient``). 58 | * ``HashClient.get_many()`` now longer stores ``False`` for missing keys from 59 | unavailable clients. Instead, the result won't contain the key at all. 60 | * Added missing ``HashClient.close()`` and ``HashClient.quit()``. 61 | 62 | New in version 3.2.0 63 | -------------------- 64 | * ``PooledClient`` and ``HashClient`` now support custom ``Client`` classes 65 | 66 | New in version 3.1.1 67 | -------------------- 68 | * Improve ``MockMemcacheClient`` to behave even more like ``Client`` 69 | 70 | New in version 3.1.0 71 | -------------------- 72 | * Add TLS support for TCP sockets. 73 | * Fix corner case when dead hashed server comes back alive. 74 | 75 | New in version 3.0.1 76 | -------------------- 77 | * Make MockMemcacheClient more consistent with the real client. 78 | * Pass ``encoding`` from HashClient to its pooled clients when ``use_pooling`` 79 | is enabled. 80 | 81 | New in version 3.0.0 82 | -------------------- 83 | * The serialization API has been reworked. Instead of consuming a serializer 84 | and deserializer as separate arguments, client objects now expect an argument 85 | ``serde`` to be an object which implements ``serialize`` and ``deserialize`` 86 | as methods. (``serialize`` and ``deserialize`` are still supported but 87 | considered deprecated.) 88 | * Validate integer inputs for ``expire``, ``delay``, ``incr``, ``decr``, and 89 | ``memlimit`` -- non-integer values now raise ``MemcacheIllegalInputError`` 90 | * Validate inputs for ``cas`` -- values which are not integers or strings of 91 | 0-9 now raise ``MemcacheIllegalInputError`` 92 | * Add ``prepend`` and ``append`` support to ``MockMemcacheClient``. 93 | * Add the ``touch`` method to ``HashClient``. 94 | * Added official support for Python 3.8. 95 | 96 | New in version 2.2.2 97 | -------------------- 98 | * Fix ``long_description`` string in Python packaging. 99 | 100 | New in version 2.2.1 101 | -------------------- 102 | * Fix ``flags`` when setting multiple differently-typed values at once. 103 | 104 | New in version 2.2.0 105 | -------------------- 106 | * Drop official support for Python 3.4. 107 | * Use ``setup.cfg`` metadata instead ``setup.py`` config to generate package. 108 | * Add ``default_noreply`` parameter to ``HashClient``. 109 | * Add ``encoding`` parameter to ``Client`` constructors (defaults to ``ascii``). 110 | * Add ``flags`` parameter to write operation methods. 111 | * Handle unicode key values in ``MockMemcacheClient`` correctly. 112 | * Improve ASCII encoding failure exception. 
113 | 114 | New in version 2.1.1 115 | -------------------- 116 | * Fix ``setup.py`` dependency on six already being installed. 117 | 118 | New in version 2.1.0 119 | -------------------- 120 | * Public classes and exceptions can now be imported from the top-level 121 | ``pymemcache`` package (e.g. ``pymemcache.Client``). 122 | `#197 `_ 123 | * Add UNIX domain socket support and document server connection options. 124 | `#206 `_ 125 | * Add support for the ``cache_memlimit`` command. 126 | `#211 `_ 127 | * Commands key are now always sent in their original order. 128 | `#209 `_ 129 | 130 | New in version 2.0.0 131 | -------------------- 132 | * Change set_many and set_multi api return value. `#179 `_ 133 | * Fix support for newbytes from python-future. `#187 `_ 134 | * Add support for Python 3.7, and drop support for Python 3.3 135 | * Properly batch Client.set_many() call. `#182 `_ 136 | * Improve _check_key() and _store_cmd() performance. `#183 `_ 137 | * Properly batch Client.delete_many() call. `#184 `_ 138 | * Add option to explicitly set pickle version used by serde. `#190 `_ 139 | 140 | New in version 1.4.4 141 | -------------------- 142 | * pypy3 to travis test matrix 143 | * full benchmarks in test 144 | * fix flake8 issues 145 | * Have mockmemcacheclient support non-ascii strings 146 | * Switch from using pickle format 0 to the highest available version. See `#156 `_ 147 | 148 | *Warning*: different versions of python have different highest pickle versions: https://docs.python.org/3/library/pickle.html 149 | 150 | 151 | New in version 1.4.3 152 | -------------------- 153 | * Documentation improvements 154 | * Fixed cachedump stats command, see `#103 `_ 155 | * Honor default_value in HashClient 156 | 157 | New in version 1.4.2 158 | -------------------- 159 | * Drop support for python 2.6, see `#109 `_ 160 | 161 | New in version 1.4.1 162 | -------------------- 163 | * Python 3 serializations fixes `#131 `_ 164 | * Drop support for pypy3 165 | * Comment cleanup 166 | * Add gets_many to hash_client 167 | * Better checking for illegal chars in key 168 | 169 | New in version 1.4.0 170 | -------------------- 171 | * Unicode keys support. It is now possible to pass the flag ``allow_unicode_keys`` when creating the clients, thanks @jogo! 172 | * Fixed a bug where PooledClient wasn't following ``default_noreply`` arg set on init, thanks @kols! 173 | * Improved documentation 174 | 175 | New in version 1.3.8 176 | -------------------- 177 | * use cpickle instead of pickle when possible (python2) 178 | 179 | New in version 1.3.7 180 | -------------------- 181 | * default parameter on get(key, default=0) 182 | * fixed docs to autogenerate themselves with sphinx 183 | * fix linter to work with python3 184 | * improve error message on illegal Input for the key 185 | * refactor stat parsing 186 | * fix MockMemcacheClient 187 | * fix unicode char in middle of key bug 188 | 189 | New in version 1.3.6 190 | -------------------- 191 | * Fix flake8 and cleanup tox building 192 | * Fix security vulnerability by sanitizing key input 193 | 194 | New in version 1.3.5 195 | -------------------- 196 | * Bug fix for HashClient when retries is set to zero. 197 | * Adding the VERSION command to the clients. 198 | 199 | New in version 1.3.4 200 | -------------------- 201 | * Bug fix for the HashClient that corrects behavior when there are no working servers. 202 | 203 | New in version 1.3.3 204 | -------------------- 205 | * Adding caching to the Travis build. 
206 | * A bug fix for pluggable hashing in HashClient. 207 | * Adding a default_noreply argument to the Client ctor. 208 | 209 | New in version 1.3.2 210 | -------------------- 211 | * Making the location of Memcache Exceptions backwards compatible. 212 | 213 | New in version 1.3.0 214 | -------------------- 215 | * Python 3 Support 216 | * Introduced HashClient that uses consistent hasing for allocating keys across many memcached nodes. It also can detect servers going down and rebalance keys across the available nodes. 217 | * Retry sock.recv() when it raises EINTR 218 | 219 | New in version 1.2.9 220 | -------------------- 221 | * Introduced PooledClient a thread-safe pool of clients 222 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.* ChangeLog.* setup.py setup.cfg LICENSE.txt 2 | include *-requirements.txt tox.ini 3 | include pymemcache/py.typed 4 | recursive-include pymemcache *.py 5 | recursive-include pymemcache/test *.crt 6 | recursive-include pymemcache/test *.key 7 | recursive-include docs *.rst 8 | global-exclude *.pyc 9 | global-exclude *.pyo 10 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | pymemcache 2 | ========== 3 | 4 | .. image:: https://img.shields.io/pypi/v/pymemcache.svg 5 | :target: https://pypi.python.org/pypi/pymemcache 6 | 7 | .. image:: https://readthedocs.org/projects/pymemcache/badge/?version=master 8 | :target: https://pymemcache.readthedocs.io/en/latest/ 9 | :alt: Master Documentation Status 10 | 11 | A comprehensive, fast, pure-Python memcached client. 12 | 13 | pymemcache supports the following features: 14 | 15 | * Complete implementation of the memcached text protocol. 16 | * Connections using UNIX sockets, or TCP over IPv4 or IPv6. 17 | * Configurable timeouts for socket connect and send/recv calls. 18 | * Access to the "noreply" flag, which can significantly increase the speed of writes. 19 | * Flexible, modular and simple approach to serialization and deserialization. 20 | * The (optional) ability to treat network and memcached errors as cache misses. 21 | 22 | Installing pymemcache 23 | ===================== 24 | 25 | Install from pip: 26 | 27 | .. code-block:: bash 28 | 29 | pip install pymemcache 30 | 31 | For development, clone from github and run the tests: 32 | 33 | .. code-block:: bash 34 | 35 | git clone https://github.com/pinterest/pymemcache.git 36 | cd pymemcache 37 | 38 | Run the tests (make sure you have a local memcached server running): 39 | 40 | .. code-block:: bash 41 | 42 | tox 43 | 44 | Usage 45 | ===== 46 | 47 | See the documentation here: https://pymemcache.readthedocs.io/en/latest/ 48 | 49 | Django 50 | ------ 51 | 52 | Since version 3.2, Django has included a pymemcache-based cache backend. 53 | See `its documentation 54 | `__. 55 | 56 | On older Django versions, you can use 57 | `django-pymemcache `_. 58 | 59 | Comparison with Other Libraries 60 | =============================== 61 | 62 | pylibmc 63 | ------- 64 | 65 | The pylibmc library is a wrapper around libmemcached, implemented in C. It is 66 | fast, implements consistent hashing, the full memcached protocol and timeouts. 67 | It does not provide access to the "noreply" flag. It also isn't pure Python, 68 | so using it with libraries like gevent is out of the question, and its 69 | dependency on libmemcached poses challenges (e.g., it must be built against 70 | the same version of libmemcached that it will use at runtime). 71 | 72 | python-memcached 73 | ---------------- 74 | 75 | The python-memcached library implements the entire memcached text protocol, has 76 | a single timeout for all socket calls and has a flexible approach to 77 | serialization and deserialization. It is also written entirely in Python, so 78 | it works well with libraries like gevent. However, it is tied to using thread 79 | locals, doesn't implement "noreply", can't treat errors as cache misses and is 80 | slower than both pylibmc and pymemcache. 
It is also tied to a specific method 81 | for handling clusters of memcached servers. 82 | 83 | memcache_client 84 | --------------- 85 | 86 | The team at mixpanel put together a pure Python memcached client as well. It 87 | has more fine grained support for socket timeouts, only connects to a single 88 | host. However, it doesn't support most of the memcached API (just get, set, 89 | delete and stats), doesn't support "noreply", has no serialization or 90 | deserialization support and can't treat errors as cache misses. 91 | 92 | External Links 93 | ============== 94 | 95 | The memcached text protocol reference page: 96 | https://github.com/memcached/memcached/blob/master/doc/protocol.txt 97 | 98 | The python-memcached library (another pure-Python library): 99 | https://github.com/linsomniac/python-memcached 100 | 101 | Mixpanel's Blog post about their memcached client for Python: 102 | https://engineering.mixpanel.com/we-went-down-so-we-wrote-a-better-pure-python-memcache-client-b409a9fe07a9 103 | 104 | Mixpanel's pure Python memcached client: 105 | https://github.com/mixpanel/memcache_client 106 | 107 | Bye-bye python-memcached, hello pymemcache (migration guide) 108 | https://jugmac00.github.io/blog/bye-bye-python-memcached-hello-pymemcache/ 109 | 110 | Credits 111 | ======= 112 | 113 | * `Charles Gordon `_ 114 | * `Dave Dash `_ 115 | * `Dan Crosta `_ 116 | * `Julian Berman `_ 117 | * `Mark Shirley `_ 118 | * `Tim Bart `_ 119 | * `Thomas Orozco `_ 120 | * `Marc Abramowitz `_ 121 | * `Marc-Andre Courtois `_ 122 | * `Julien Danjou `_ 123 | * `INADA Naoki `_ 124 | * `James Socol `_ 125 | * `Joshua Harlow `_ 126 | * `John Anderson `_ 127 | * `Adam Chainz `_ 128 | * `Ernest W. Durbin III `_ 129 | * `Remco van Oosterhout `_ 130 | * `Nicholas Charriere `_ 131 | * `Joe Gordon `_ 132 | * `Jon Parise `_ 133 | * `Stephen Rosen `_ 134 | * `Feras Alazzeh `_ 135 | * `Moisés Guimarães de Medeiros `_ 136 | * `Nick Pope `_ 137 | * `Hervé Beraud `_ 138 | * `Martin Jørgensen `_ 139 | * `Matej Spiller Muys `_ 140 | * `misuzu `_ 141 | 142 | We're Hiring! 143 | ============= 144 | Are you really excited about open-source? Or great software engineering? 145 | Pinterest is `hiring `_! 146 | -------------------------------------------------------------------------------- /docs-requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx==6.2.1 2 | sphinx_rtd_theme==1.2.2 3 | sphinxcontrib-apidoc==0.5.0 4 | sphinxcontrib-napoleon==0.7 5 | pytest 6 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
16 | 17 | .PHONY: help 18 | help: 19 | @echo "Please use \`make ' where is one of" 20 | @echo " html to make standalone HTML files" 21 | @echo " dirhtml to make HTML files named index.html in directories" 22 | @echo " singlehtml to make a single large HTML file" 23 | @echo " pickle to make pickle files" 24 | @echo " json to make JSON files" 25 | @echo " htmlhelp to make HTML files and a HTML help project" 26 | @echo " qthelp to make HTML files and a qthelp project" 27 | @echo " applehelp to make an Apple Help Book" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " epub3 to make an epub3" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 34 | @echo " text to make text files" 35 | @echo " man to make manual pages" 36 | @echo " texinfo to make Texinfo files" 37 | @echo " info to make Texinfo files and run them through makeinfo" 38 | @echo " gettext to make PO message catalogs" 39 | @echo " changes to make an overview of all changed/added/deprecated items" 40 | @echo " xml to make Docutils-native XML files" 41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 42 | @echo " linkcheck to check all external links for integrity" 43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 44 | @echo " coverage to run coverage check of the documentation (if enabled)" 45 | @echo " dummy to check syntax errors of document sources" 46 | 47 | .PHONY: clean 48 | clean: 49 | rm -rf $(BUILDDIR)/* 50 | 51 | .PHONY: html 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | .PHONY: dirhtml 58 | dirhtml: 59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 60 | @echo 61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 62 | 63 | .PHONY: singlehtml 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | .PHONY: pickle 70 | pickle: 71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 72 | @echo 73 | @echo "Build finished; now you can process the pickle files." 74 | 75 | .PHONY: json 76 | json: 77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 78 | @echo 79 | @echo "Build finished; now you can process the JSON files." 80 | 81 | .PHONY: htmlhelp 82 | htmlhelp: 83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 84 | @echo 85 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 86 | ".hhp project file in $(BUILDDIR)/htmlhelp." 87 | 88 | .PHONY: qthelp 89 | qthelp: 90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 91 | @echo 92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pymemcache.qhcp" 95 | @echo "To view the help file:" 96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pymemcache.qhc" 97 | 98 | .PHONY: applehelp 99 | applehelp: 100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 101 | @echo 102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 103 | @echo "N.B. 
You won't be able to view it unless you put it in" \ 104 | "~/Library/Documentation/Help or install it in your application" \ 105 | "bundle." 106 | 107 | .PHONY: devhelp 108 | devhelp: 109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 110 | @echo 111 | @echo "Build finished." 112 | @echo "To view the help file:" 113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pymemcache" 114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pymemcache" 115 | @echo "# devhelp" 116 | 117 | .PHONY: epub 118 | epub: 119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 120 | @echo 121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 122 | 123 | .PHONY: epub3 124 | epub3: 125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 126 | @echo 127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 128 | 129 | .PHONY: latex 130 | latex: 131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 132 | @echo 133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 135 | "(use \`make latexpdf' here to do that automatically)." 136 | 137 | .PHONY: latexpdf 138 | latexpdf: 139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 140 | @echo "Running LaTeX files through pdflatex..." 141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 143 | 144 | .PHONY: latexpdfja 145 | latexpdfja: 146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 147 | @echo "Running LaTeX files through platex and dvipdfmx..." 148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 150 | 151 | .PHONY: text 152 | text: 153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 154 | @echo 155 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 156 | 157 | .PHONY: man 158 | man: 159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 160 | @echo 161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 162 | 163 | .PHONY: texinfo 164 | texinfo: 165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 166 | @echo 167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 168 | @echo "Run \`make' in that directory to run these through makeinfo" \ 169 | "(use \`make info' here to do that automatically)." 170 | 171 | .PHONY: info 172 | info: 173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 174 | @echo "Running Texinfo files through makeinfo..." 175 | make -C $(BUILDDIR)/texinfo info 176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 177 | 178 | .PHONY: gettext 179 | gettext: 180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 181 | @echo 182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 183 | 184 | .PHONY: changes 185 | changes: 186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 187 | @echo 188 | @echo "The overview file is in $(BUILDDIR)/changes." 189 | 190 | .PHONY: linkcheck 191 | linkcheck: 192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 193 | @echo 194 | @echo "Link check complete; look for any errors in the above output " \ 195 | "or in $(BUILDDIR)/linkcheck/output.txt." 
196 | 197 | .PHONY: doctest 198 | doctest: 199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 200 | @echo "Testing of doctests in the sources finished, look at the " \ 201 | "results in $(BUILDDIR)/doctest/output.txt." 202 | 203 | .PHONY: coverage 204 | coverage: 205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 206 | @echo "Testing of coverage in the sources finished, look at the " \ 207 | "results in $(BUILDDIR)/coverage/python.txt." 208 | 209 | .PHONY: xml 210 | xml: 211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 212 | @echo 213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 214 | 215 | .PHONY: pseudoxml 216 | pseudoxml: 217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 218 | @echo 219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 220 | 221 | .PHONY: dummy 222 | dummy: 223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 224 | @echo 225 | @echo "Build finished. Dummy builder generates no files." 226 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../ChangeLog.rst 2 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # 2 | # pymemcache documentation build configuration file, created by 3 | # sphinx-quickstart on Wed Aug 3 11:15:43 2016. 4 | # 5 | # This file is execfile()d with the current directory set to its 6 | # containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | # If extensions (or modules to document with autodoc) are in another directory, 15 | # add these directories to sys.path here. If the directory is relative to the 16 | # documentation root, use os.path.abspath to make it absolute, like shown here. 17 | # 18 | import os 19 | import subprocess 20 | import sys 21 | 22 | sys.path.insert(0, os.path.abspath("..")) 23 | 24 | # -- General configuration ------------------------------------------------ 25 | 26 | # If your documentation needs a minimal Sphinx version, state it here. 27 | # 28 | needs_sphinx = "3.0.0" 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = [ 34 | "sphinx.ext.autodoc", 35 | "sphinx.ext.intersphinx", 36 | "sphinx.ext.napoleon", 37 | "sphinx.ext.ifconfig", 38 | "sphinxcontrib.apidoc", 39 | ] 40 | 41 | # Add any paths that contain templates here, relative to this directory. 42 | templates_path = ["_templates"] 43 | 44 | # The suffix(es) of source filenames. 45 | # You can specify multiple suffix as a list of string: 46 | # 47 | # source_suffix = ['.rst', '.md'] 48 | source_suffix = ".rst" 49 | 50 | # The encoding of source files. 51 | # 52 | # source_encoding = 'utf-8-sig' 53 | 54 | # The master toctree document. 55 | master_doc = "index" 56 | 57 | # General information about the project. 
58 | project = "pymemcache" 59 | copyright = "Pinterest" 60 | author = "Charles Gordon, Jon Parise, Joe Gordon" 61 | 62 | # The version info for the project you're documenting, acts as replacement for 63 | # |version| and |release|, also used in various other places throughout the 64 | # built documents. 65 | # 66 | # The short X.Y version. 67 | version = "3.5" 68 | # The full version, including alpha/beta/rc tags. 69 | release = "3.5.2" 70 | 71 | # The language for content autogenerated by Sphinx. Refer to documentation 72 | # for a list of supported languages. 73 | # 74 | # This is also used if you do content translation via gettext catalogs. 75 | # Usually you set "language" from the command line for these cases. 76 | language = None 77 | 78 | # There are two options for replacing |today|: either, you set today to some 79 | # non-false value, then it is used: 80 | # 81 | # today = '' 82 | # 83 | # Else, today_fmt is used as the format for a strftime call. 84 | # 85 | # today_fmt = '%B %d, %Y' 86 | 87 | # List of patterns, relative to source directory, that match files and 88 | # directories to ignore when looking for source files. 89 | # This patterns also effect to html_static_path and html_extra_path 90 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 91 | 92 | # The reST default role (used for this markup: `text`) to use for all 93 | # documents. 94 | # 95 | # default_role = None 96 | 97 | # If true, '()' will be appended to :func: etc. cross-reference text. 98 | # 99 | # add_function_parentheses = True 100 | 101 | # If true, the current module name will be prepended to all description 102 | # unit titles (such as .. function::). 103 | # 104 | # add_module_names = True 105 | 106 | # If true, sectionauthor and moduleauthor directives will be shown in the 107 | # output. They are ignored by default. 108 | # 109 | # show_authors = False 110 | 111 | # The name of the Pygments (syntax highlighting) style to use. 112 | pygments_style = "sphinx" 113 | 114 | # A list of ignored prefixes for module index sorting. 115 | # modindex_common_prefix = [] 116 | 117 | # If true, keep warnings as "system message" paragraphs in the built documents. 118 | # keep_warnings = False 119 | 120 | # If true, `todo` and `todoList` produce output, else they produce nothing. 121 | todo_include_todos = False 122 | 123 | 124 | # -- Options for HTML output ---------------------------------------------- 125 | 126 | # The theme to use for HTML and HTML Help pages. See the documentation for 127 | # a list of builtin themes. 128 | # 129 | html_theme = "sphinx_rtd_theme" 130 | 131 | # Theme options are theme-specific and customize the look and feel of a theme 132 | # further. For a list of options available for each theme, see the 133 | # documentation. 134 | # 135 | # html_theme_options = {} 136 | 137 | # Add any paths that contain custom themes here, relative to this directory. 138 | # html_theme_path = [] 139 | 140 | # The name for this set of Sphinx documents. 141 | # " v documentation" by default. 142 | # 143 | # html_title = u'pymemcache v1.3.6' 144 | 145 | # A shorter title for the navigation bar. Default is the same as html_title. 146 | # 147 | # html_short_title = None 148 | 149 | # The name of an image file (relative to this directory) to place at the top 150 | # of the sidebar. 151 | # 152 | # html_logo = None 153 | 154 | # The name of an image file (relative to this directory) to use as a favicon of 155 | # the docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 156 | # pixels large. 157 | # 158 | # html_favicon = None 159 | 160 | # Add any paths that contain custom static files (such as style sheets) here, 161 | # relative to this directory. They are copied after the builtin static files, 162 | # so a file named "default.css" will overwrite the builtin "default.css". 163 | html_static_path = ["_static"] 164 | 165 | # Add any extra paths that contain custom files (such as robots.txt or 166 | # .htaccess) here, relative to this directory. These files are copied 167 | # directly to the root of the documentation. 168 | # 169 | # html_extra_path = [] 170 | 171 | # If not None, a 'Last updated on:' timestamp is inserted at every page 172 | # bottom, using the given strftime format. 173 | # The empty string is equivalent to '%b %d, %Y'. 174 | # 175 | # html_last_updated_fmt = None 176 | 177 | # If true, SmartyPants will be used to convert quotes and dashes to 178 | # typographically correct entities. 179 | # 180 | # html_use_smartypants = True 181 | 182 | # Custom sidebar templates, maps document names to template names. 183 | # 184 | # html_sidebars = {} 185 | 186 | # Additional templates that should be rendered to pages, maps page names to 187 | # template names. 188 | # 189 | # html_additional_pages = {} 190 | 191 | # If false, no module index is generated. 192 | # 193 | # html_domain_indices = True 194 | 195 | # If false, no index is generated. 196 | # 197 | # html_use_index = True 198 | 199 | # If true, the index is split into individual pages for each letter. 200 | # 201 | # html_split_index = False 202 | 203 | # If true, links to the reST sources are added to the pages. 204 | # 205 | # html_show_sourcelink = True 206 | 207 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 208 | # 209 | # html_show_sphinx = True 210 | 211 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 212 | # 213 | # html_show_copyright = True 214 | 215 | # If true, an OpenSearch description file will be output, and all pages will 216 | # contain a tag referring to it. The value of this option must be the 217 | # base URL from which the finished HTML is served. 218 | # 219 | # html_use_opensearch = '' 220 | 221 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 222 | # html_file_suffix = None 223 | 224 | # Language to be used for generating the HTML full-text search index. 225 | # Sphinx supports the following languages: 226 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 227 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' 228 | # 229 | # html_search_language = 'en' 230 | 231 | # A dictionary with options for the search language support, empty by default. 232 | # 'ja' uses this config value. 233 | # 'zh' user can custom change `jieba` dictionary path. 234 | # 235 | # html_search_options = {'type': 'default'} 236 | 237 | # The name of a javascript file (relative to the configuration directory) that 238 | # implements a search results scorer. If empty, the default will be used. 239 | # 240 | # html_search_scorer = 'scorer.js' 241 | 242 | # Output file base name for HTML help builder. 243 | htmlhelp_basename = "pymemcachedoc" 244 | 245 | # -- Options for LaTeX output --------------------------------------------- 246 | 247 | latex_elements = { 248 | # The paper size ('letterpaper' or 'a4paper'). 249 | # 250 | # 'papersize': 'letterpaper', 251 | # The font size ('10pt', '11pt' or '12pt'). 
252 | # 253 | # 'pointsize': '10pt', 254 | # Additional stuff for the LaTeX preamble. 255 | # 256 | # 'preamble': '', 257 | # Latex figure (float) alignment 258 | # 259 | # 'figure_align': 'htbp', 260 | } 261 | 262 | # Grouping the document tree into LaTeX files. List of tuples 263 | # (source start file, target name, title, 264 | # author, documentclass [howto, manual, or own class]). 265 | latex_documents = [ 266 | ( 267 | master_doc, 268 | "pymemcache.tex", 269 | "pymemcache Documentation", 270 | "Charles Gordon, Jon Parise, Joe Gordon", 271 | "manual", 272 | ), 273 | ] 274 | 275 | # The name of an image file (relative to this directory) to place at the top of 276 | # the title page. 277 | # 278 | # latex_logo = None 279 | 280 | # For "manual" documents, if this is true, then toplevel headings are parts, 281 | # not chapters. 282 | # 283 | # latex_use_parts = False 284 | 285 | # If true, show page references after internal links. 286 | # 287 | # latex_show_pagerefs = False 288 | 289 | # If true, show URL addresses after external links. 290 | # 291 | # latex_show_urls = False 292 | 293 | # Documents to append as an appendix to all manuals. 294 | # 295 | # latex_appendices = [] 296 | 297 | # It false, will not define \strong, \code, itleref, \crossref ... but only 298 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added 299 | # packages. 300 | # 301 | # latex_keep_old_macro_names = True 302 | 303 | # If false, no module index is generated. 304 | # 305 | # latex_domain_indices = True 306 | 307 | 308 | # -- Options for manual page output --------------------------------------- 309 | 310 | # One entry per manual page. List of tuples 311 | # (source start file, name, description, authors, manual section). 312 | man_pages = [(master_doc, "pymemcache", "pymemcache Documentation", [author], 1)] 313 | 314 | # If true, show URL addresses after external links. 315 | # 316 | # man_show_urls = False 317 | 318 | 319 | # -- Options for Texinfo output ------------------------------------------- 320 | 321 | # Grouping the document tree into Texinfo files. List of tuples 322 | # (source start file, target name, title, author, 323 | # dir menu entry, description, category) 324 | texinfo_documents = [ 325 | ( 326 | master_doc, 327 | "pymemcache", 328 | "pymemcache Documentation", 329 | author, 330 | "pymemcache", 331 | "One line description of project.", 332 | "Miscellaneous", 333 | ), 334 | ] 335 | 336 | # Documents to append as an appendix to all manuals. 337 | # 338 | # texinfo_appendices = [] 339 | 340 | # If false, no module index is generated. 341 | # 342 | # texinfo_domain_indices = True 343 | 344 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 345 | # 346 | # texinfo_show_urls = 'footnote' 347 | 348 | # If true, do not generate a @detailmenu in the "Top" node's menu. 349 | # 350 | # texinfo_no_detailmenu = False 351 | 352 | 353 | # Example configuration for intersphinx: refer to the Python standard library. 
354 | intersphinx_mapping = {"https://docs.python.org/": None} 355 | 356 | 357 | # autodoc include __init__ 358 | autodoc_default_options = { 359 | "special-members": "__init__", 360 | } 361 | 362 | 363 | # Automate building apidoc when building with readthedocs 364 | apidoc_module_dir = os.path.join("..", "pymemcache") 365 | apidoc_output_dir = "apidoc" 366 | apidoc_excluded_paths = ["test", "setup.py"] 367 | apidoc_separate_modules = True 368 | -------------------------------------------------------------------------------- /docs/getting_started.rst: -------------------------------------------------------------------------------- 1 | Getting started! 2 | ================ 3 | A comprehensive, fast, pure-Python memcached client library. 4 | 5 | Basic Usage 6 | ------------ 7 | 8 | .. code-block:: python 9 | 10 | from pymemcache.client.base import Client 11 | 12 | client = Client('localhost') 13 | client.set('some_key', 'some_value') 14 | result = client.get('some_key') 15 | 16 | The server to connect to can be specified in a number of ways. 17 | 18 | If using TCP connections over IPv4 or IPv6, the ``server`` parameter can be 19 | passed a ``host`` string, a ``host:port`` string, or a ``(host, port)`` 20 | 2-tuple. The host part may be a domain name, an IPv4 address, or an IPv6 21 | address. The port may be omitted, in which case it will default to ``11211``. 22 | 23 | .. code-block:: python 24 | 25 | ipv4_client = Client('127.0.0.1') 26 | ipv4_client_with_port = Client('127.0.0.1:11211') 27 | ipv4_client_using_tuple = Client(('127.0.0.1', 11211)) 28 | 29 | ipv6_client = Client('[::1]') 30 | ipv6_client_with_port = Client('[::1]:11211') 31 | ipv6_client_using_tuple = Client(('::1', 11211)) 32 | 33 | domain_client = Client('localhost') 34 | domain_client_with_port = Client('localhost:11211') 35 | domain_client_using_tuple = Client(('localhost', 11211)) 36 | 37 | Note that IPv6 may be used in preference to IPv4 when passing a domain name as 38 | the host if an IPv6 address can be resolved for that domain. 39 | 40 | You can also connect to a local memcached server over a UNIX domain socket by 41 | passing the socket's path to the client's ``server`` parameter. An optional 42 | ``unix:`` prefix may be used for compatibility in code that uses other client 43 | libraries that require it. 44 | 45 | .. code-block:: python 46 | 47 | client = Client('/run/memcached/memcached.sock') 48 | client_with_prefix = Client('unix:/run/memcached/memcached.sock') 49 | 50 | Using a client pool 51 | ------------------- 52 | :class:`pymemcache.client.base.PooledClient` is a thread-safe client pool 53 | that provides the same API as :class:`pymemcache.client.base.Client`. It's 54 | useful in for cases when you want to maintain a pool of already-connected 55 | clients for improved performance. 56 | 57 | .. code-block:: python 58 | 59 | from pymemcache.client.base import PooledClient 60 | 61 | client = PooledClient('127.0.0.1', max_pool_size=4) 62 | 63 | Using a memcached cluster 64 | ------------------------- 65 | This will use a consistent hashing algorithm to choose which server to 66 | set/get the values from. It will also automatically rebalance depending 67 | on if a server goes down. 68 | 69 | .. 
88 | Rebalancing in the :class:`pymemcache.client.hash.HashClient` functions as
89 | follows:
90 | 
91 | 1. A :class:`pymemcache.client.hash.HashClient` is created with 3 nodes,
92 | ``node1``, ``node2`` and ``node3``.
93 | 2. A number of values are set in the client using ``set`` and ``set_many``.
94 | Example:
95 | 
96 | - ``key1`` -> ``node2``
97 | - ``key2`` -> ``node3``
98 | - ``key3`` -> ``node3``
99 | - ``key4`` -> ``node1``
100 | - ``key5`` -> ``node2``
101 | 
102 | 3. Subsequent ``get`` calls will hash to the correct server and requests are routed
103 | accordingly.
104 | 4. ``node3`` goes down.
105 | 5. The ``HashClient`` tries to ``get("key2")`` but detects that the node is
106 | down; it marks the node as down and removes it from the hasher.
107 | The ``HashClient`` can attempt to retry the operation based on the
108 | ``retry_attempts`` and ``retry_timeout`` arguments.
109 | If ``ignore_exc`` is set, this is treated as a miss; if not, an exception
110 | is raised.
111 | 6. Any ``get``/``set`` for ``key2`` and ``key3`` will now hash differently, for
112 | example:
113 | 
114 | - ``key2`` -> ``node2``
115 | - ``key3`` -> ``node1``
116 | 
117 | 7. After the amount of time specified in the ``dead_timeout`` argument,
118 | ``node3`` is added back into the hasher and will be retried for any future
119 | operations.
120 | 
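This failure handling is controlled entirely through the ``HashClient``
constructor. A minimal sketch of the arguments involved (the server addresses
and values shown are illustrative):

.. code-block:: python

    from pymemcache.client.hash import HashClient

    client = HashClient(
        [
            '127.0.0.1:11211',
            '127.0.0.1:11212',
            '127.0.0.1:11213',
        ],
        retry_attempts=2,  # tries against a failing node before it is marked dead
        retry_timeout=1,   # seconds that must pass between retry attempts
        dead_timeout=60,   # seconds before a dead node is added back to the hasher
        ignore_exc=True,   # treat errors as cache misses instead of raising
    )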
121 | Using the built-in retrying mechanism
122 | -------------------------------------
123 | The library comes with retry mechanisms that can be used to wrap all kinds of
124 | pymemcache clients. The wrapper allows you to define the exceptions that you want
125 | to handle with retries, which exceptions to exclude, how many attempts to make,
126 | and how long to wait between attempts.
127 | 
128 | The ``RetryingClient`` wraps around any of the other included clients and will
129 | have the same methods. For this example, we're just using the base ``Client``.
130 | 
131 | .. code-block:: python
132 | 
133 | from pymemcache.client.base import Client
134 | from pymemcache.client.retrying import RetryingClient
135 | from pymemcache.exceptions import MemcacheUnexpectedCloseError
136 | 
137 | base_client = Client(("localhost", 11211))
138 | client = RetryingClient(
139 | base_client,
140 | attempts=3,
141 | retry_delay=0.01,
142 | retry_for=[MemcacheUnexpectedCloseError]
143 | )
144 | client.set('some_key', 'some value')
145 | result = client.get('some_key')
146 | 
147 | The above client will attempt each call three times with a wait of 10ms between
148 | each attempt, as long as the exception is a ``MemcacheUnexpectedCloseError``.
149 | 
150 | Using TLS
151 | ---------
152 | **Memcached** `supports `_
153 | authentication and encryption via TLS since version **1.5.13**.
154 | 
155 | A Memcached server running with TLS enabled will only accept TLS connections.
156 | 
157 | To enable TLS in pymemcache, pass a valid TLS context to the client's
158 | ``tls_context`` parameter:
159 | 
160 | .. code-block:: python
161 | 
162 | import ssl
163 | from pymemcache.client.base import Client
164 | 
165 | context = ssl.create_default_context(
166 | cafile="my-ca-root.crt",
167 | )
168 | 
169 | client = Client('localhost', tls_context=context)
170 | client.set('some_key', 'some_value')
171 | result = client.get('some_key')
172 | 
173 | 
174 | Serialization
175 | --------------
176 | 
177 | .. code-block:: python
178 | 
179 | import json
180 | from pymemcache.client.base import Client
181 | 
182 | class JsonSerde(object):
183 | def serialize(self, key, value):
184 | if isinstance(value, str):
185 | return value, 1
186 | return json.dumps(value), 2
187 | 
188 | def deserialize(self, key, value, flags):
189 | if flags == 1:
190 | return value
191 | if flags == 2:
192 | return json.loads(value)
193 | raise Exception("Unknown serialization format")
194 | 
195 | client = Client('localhost', serde=JsonSerde())
196 | client.set('key', {'a':'b', 'c':'d'})
197 | result = client.get('key')
198 | 
199 | pymemcache provides a default
200 | `pickle `_-based serializer:
201 | 
202 | .. code-block:: python
203 | 
204 | from pymemcache.client.base import Client
205 | from pymemcache import serde
206 | 
207 | class Foo(object):
208 | pass
209 | 
210 | client = Client('localhost', serde=serde.pickle_serde)
211 | client.set('key', Foo())
212 | result = client.get('key')
213 | 
214 | The serializer uses the highest pickle protocol available. In order to make
215 | sure multiple versions of Python can read the pickled values, you can specify
216 | the protocol version by explicitly instantiating :class:`pymemcache.serde.PickleSerde`:
217 | 
218 | .. code-block:: python
219 | 
220 | client = Client('localhost', serde=serde.PickleSerde(pickle_version=2))
221 | 
222 | 
223 | Deserialization with Python 3
224 | -----------------------------
225 | 
226 | Values passed to the `serde.deserialize()` method will be bytestrings. It is
227 | therefore necessary to encode and decode them correctly. Here's a version of
228 | the `JsonSerde` from above which is more careful with encodings:
229 | 
230 | .. code-block:: python
231 | 
232 | class JsonSerde(object):
233 | def serialize(self, key, value):
234 | if isinstance(value, str):
235 | return value.encode('utf-8'), 1
236 | return json.dumps(value).encode('utf-8'), 2
237 | 
238 | def deserialize(self, key, value, flags):
239 | if flags == 1:
240 | return value.decode('utf-8')
241 | if flags == 2:
242 | return json.loads(value.decode('utf-8'))
243 | raise Exception("Unknown serialization format")
244 | 
245 | 
246 | Interacting with pymemcache
247 | ---------------------------
248 | 
249 | For testing purposes, pymemcache can be used in an interactive mode from the
250 | Python interpreter, IPython, or tools like tox.
251 | 
252 | One main advantage of using `tox` to interact with `pymemcache` is that it
253 | comes with its own virtual environments. It will automatically install
254 | pymemcache and fetch all the needed requirements at runtime. See the example below:
255 | 
256 | .. code-block::
257 | 
258 | $ podman run --publish 11211:11211 -it --rm --name memcached memcached
259 | $ tox -e venv -- python
260 | >>> from pymemcache.client.base import Client
261 | >>> client = Client('127.0.0.1')
262 | >>> client.set('some_key', 'some_value')
263 | True
264 | >>> client.get('some_key')
265 | b'some_value'
266 | >>> print(client.get.__doc__)
267 | The memcached "get" command, but only for one key, as a convenience.
268 | Args:
269 | key: str, see class docs for details.
270 | default: value that will be returned if the key was not found.
271 | Returns:
272 | The value for the key, or default if the key wasn't found.
273 | 
274 | You can instantiate all the classes and clients offered by pymemcache.
275 | 
276 | Your client will remain open until you decide to close it or quit your
277 | interpreter. This lets you see what happens if your server is closed
278 | abruptly. Below is an example.
279 | 
280 | Starting your server:
281 | 
282 | .. code-block:: shell
283 | 
284 | $ podman run --publish 11211:11211 -it --name memcached memcached
285 | 
286 | Starting your client and setting some keys:
287 | 
288 | .. code-block:: shell
289 | 
290 | $ tox -e venv -- python
291 | >>> from pymemcache.client.base import Client
292 | >>> client = Client('127.0.0.1')
293 | >>> client.set('some_key', 'some_value')
294 | True
295 | 
296 | Restarting the server:
297 | 
298 | .. code-block:: shell
299 | 
300 | $ podman restart memcached
301 | 
302 | The previous client is still open; now try to retrieve some keys:
303 | 
304 | .. code-block:: shell
305 | 
306 | >>> print(client.get('some_key'))
307 | Traceback (most recent call last):
308 | File "", line 1, in
309 | File "/home/user/pymemcache/pymemcache/client/base.py", line 535, in get
310 | return self._fetch_cmd(b'get', [key], False).get(key, default)
311 | File "/home/user/pymemcache/pymemcache/client/base.py", line 910, in _fetch_cmd
312 | buf, line = _readline(self.sock, buf)
313 | File "/home/user/pymemcache/pymemcache/client/base.py", line 1305, in _readline
314 | raise MemcacheUnexpectedCloseError()
315 | pymemcache.exceptions.MemcacheUnexpectedCloseError
316 | 
317 | We can see that the connection has been closed.
318 | 
319 | You can also pass a command directly as a CLI parameter and get its output
320 | directly:
321 | 
322 | .. code-block:: shell
323 | 
324 | $ tox -e venv -- python -c "from pymemcache.client.base import Client; client = Client('127.0.0.1'); print(client.get('some_key'))"
325 | b'some_value'
326 | 
327 | This kind of usage is useful for debugging sessions or for digging manually into
328 | your server.
329 | 
330 | Key Constraints
331 | ---------------
332 | This client implements the ASCII protocol of memcached. This means keys should not
333 | contain any of the following illegal characters:
334 | 
335 | Keys cannot have spaces, new lines, carriage returns, or null characters.
336 | We suggest that if you have unicode characters or long keys, you use an
337 | effective hashing mechanism before calling this client.
338 | 
339 | At Pinterest, we have found that murmur3 hash is a great candidate for this.
340 | Alternatively, you can set `allow_unicode_keys` to support unicode keys, but
341 | beware of what unicode encoding you use to make sure multiple clients can find
342 | the same key.
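A minimal sketch of hashing keys before they reach the client (``md5`` and the
``make_key`` helper below are illustrative choices, not part of pymemcache; the
library also ships a ``murmur3_32`` implementation in
``pymemcache.client.murmur3``):

.. code-block:: python

    import hashlib

    from pymemcache.client.base import Client

    client = Client('localhost')

    def make_key(raw_key):
        # Collapse long or unicode keys into short, ASCII-safe hex digests.
        return hashlib.md5(raw_key.encode('utf-8')).hexdigest()

    client.set(make_key('some very long unicode key ☃'), 'some_value')
    result = client.get(make_key('some very long unicode key ☃'))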
343 | 
344 | Best Practices
345 | ---------------
346 | 
347 | - Always set the ``connect_timeout`` and ``timeout`` arguments in the
348 | :py:class:`pymemcache.client.base.Client` constructor to avoid blocking
349 | your process when memcached is slow. You might also want to enable the
350 | ``no_delay`` option, which sets the TCP_NODELAY flag on the connection's
351 | socket.
352 | - Use the ``noreply`` flag for a significant performance boost. The ``noreply``
353 | flag is enabled by default for "set", "add", "replace", "append", "prepend",
354 | and "delete". It is disabled by default for "cas", "incr" and "decr". It
355 | obviously doesn't apply to any get calls.
356 | - Use :func:`pymemcache.client.base.Client.get_many` and
357 | :func:`pymemcache.client.base.Client.gets_many` whenever possible, as they
358 | result in fewer round trips when fetching multiple keys.
359 | - Use the ``ignore_exc`` flag to treat memcache/network errors as cache misses
360 | on calls to the get* methods. This prevents failures in memcache, or network
361 | errors, from killing your web requests. Do not use this flag if you need to
362 | know about errors from memcache, and make sure you have some other way to
363 | detect memcache server failures.
364 | - Unless you have a known reason to do otherwise, use the provided serializer
365 | in `pymemcache.serde.pickle_serde` for any de/serialization of objects.
366 | 
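Putting a few of these recommendations together, a minimal sketch (the timeout
values below are illustrative, not tuned recommendations):

.. code-block:: python

    from pymemcache.client.base import Client

    client = Client(
        'localhost',
        connect_timeout=1.0,  # seconds to wait for the connection to be made
        timeout=0.5,          # seconds to wait for each send/recv on the socket
        no_delay=True,        # set TCP_NODELAY on the connection's socket
        ignore_exc=True,      # treat memcache/network errors as cache misses on get*
    )

    client.set('some_key', 'some_value')        # noreply already defaults to True for set
    values = client.get_many(['key1', 'key2'])  # one round trip for several keys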
367 | .. WARNING::
368 | 
369 | ``noreply`` will not read errors returned from the memcached server.
370 | 
371 | If a call with ``noreply=True`` causes an error on the server, it will
372 | still appear to succeed, and your next call that reads a response from
373 | memcached may fail unexpectedly.
374 | 
375 | ``pymemcache`` will try to catch malformed inputs and stop you from sending
376 | them to memcached, but if you are having unexplained errors, setting
377 | ``noreply=False`` may help you troubleshoot the issue.
378 | 
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. pymemcache documentation master file, created by
2 | sphinx-quickstart on Wed Aug 3 11:15:43 2016.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 | 
6 | Welcome to pymemcache's documentation!
7 | ======================================
8 | 
9 | Contents:
10 | 
11 | .. toctree::
12 | :maxdepth: 1
13 | 
14 | Getting Started
15 | Source Code
16 | ChangeLog
17 | 
18 | 
19 | 
20 | Indices and tables
21 | ==================
22 | 
23 | * :ref:`genindex`
24 | * :ref:`modindex`
25 | * :ref:`search`
26 | 
27 | 
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 | 
3 | REM Command file for Sphinx documentation
4 | 
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 | 
16 | if "%1" == "" goto help
17 | 
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^` where ^ is one of
21 | echo. html to make standalone HTML files
22 | echo.
dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. epub3 to make an epub3 31 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 32 | echo. text to make text files 33 | echo. man to make manual pages 34 | echo. texinfo to make Texinfo files 35 | echo. gettext to make PO message catalogs 36 | echo. changes to make an overview over all changed/added/deprecated items 37 | echo. xml to make Docutils-native XML files 38 | echo. pseudoxml to make pseudoxml-XML files for display purposes 39 | echo. linkcheck to check all external links for integrity 40 | echo. doctest to run all doctests embedded in the documentation if enabled 41 | echo. coverage to run coverage check of the documentation if enabled 42 | echo. dummy to check syntax errors of document sources 43 | goto end 44 | ) 45 | 46 | if "%1" == "clean" ( 47 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 48 | del /q /s %BUILDDIR%\* 49 | goto end 50 | ) 51 | 52 | 53 | REM Check if sphinx-build is available and fallback to Python version if any 54 | %SPHINXBUILD% 1>NUL 2>NUL 55 | if errorlevel 9009 goto sphinx_python 56 | goto sphinx_ok 57 | 58 | :sphinx_python 59 | 60 | set SPHINXBUILD=python -m sphinx.__init__ 61 | %SPHINXBUILD% 2> nul 62 | if errorlevel 9009 ( 63 | echo. 64 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 65 | echo.installed, then set the SPHINXBUILD environment variable to point 66 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 67 | echo.may add the Sphinx directory to PATH. 68 | echo. 69 | echo.If you don't have Sphinx installed, grab it from 70 | echo.http://sphinx-doc.org/ 71 | exit /b 1 72 | ) 73 | 74 | :sphinx_ok 75 | 76 | 77 | if "%1" == "html" ( 78 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 79 | if errorlevel 1 exit /b 1 80 | echo. 81 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 82 | goto end 83 | ) 84 | 85 | if "%1" == "dirhtml" ( 86 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 87 | if errorlevel 1 exit /b 1 88 | echo. 89 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 90 | goto end 91 | ) 92 | 93 | if "%1" == "singlehtml" ( 94 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 95 | if errorlevel 1 exit /b 1 96 | echo. 97 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 98 | goto end 99 | ) 100 | 101 | if "%1" == "pickle" ( 102 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 103 | if errorlevel 1 exit /b 1 104 | echo. 105 | echo.Build finished; now you can process the pickle files. 106 | goto end 107 | ) 108 | 109 | if "%1" == "json" ( 110 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 111 | if errorlevel 1 exit /b 1 112 | echo. 113 | echo.Build finished; now you can process the JSON files. 114 | goto end 115 | ) 116 | 117 | if "%1" == "htmlhelp" ( 118 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 119 | if errorlevel 1 exit /b 1 120 | echo. 121 | echo.Build finished; now you can run HTML Help Workshop with the ^ 122 | .hhp project file in %BUILDDIR%/htmlhelp. 
123 | goto end 124 | ) 125 | 126 | if "%1" == "qthelp" ( 127 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 128 | if errorlevel 1 exit /b 1 129 | echo. 130 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 131 | .qhcp project file in %BUILDDIR%/qthelp, like this: 132 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pymemcache.qhcp 133 | echo.To view the help file: 134 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pymemcache.ghc 135 | goto end 136 | ) 137 | 138 | if "%1" == "devhelp" ( 139 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 140 | if errorlevel 1 exit /b 1 141 | echo. 142 | echo.Build finished. 143 | goto end 144 | ) 145 | 146 | if "%1" == "epub" ( 147 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 148 | if errorlevel 1 exit /b 1 149 | echo. 150 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 151 | goto end 152 | ) 153 | 154 | if "%1" == "epub3" ( 155 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 156 | if errorlevel 1 exit /b 1 157 | echo. 158 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. 159 | goto end 160 | ) 161 | 162 | if "%1" == "latex" ( 163 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 164 | if errorlevel 1 exit /b 1 165 | echo. 166 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdf" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 177 | goto end 178 | ) 179 | 180 | if "%1" == "latexpdfja" ( 181 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 182 | cd %BUILDDIR%/latex 183 | make all-pdf-ja 184 | cd %~dp0 185 | echo. 186 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 187 | goto end 188 | ) 189 | 190 | if "%1" == "text" ( 191 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 192 | if errorlevel 1 exit /b 1 193 | echo. 194 | echo.Build finished. The text files are in %BUILDDIR%/text. 195 | goto end 196 | ) 197 | 198 | if "%1" == "man" ( 199 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 200 | if errorlevel 1 exit /b 1 201 | echo. 202 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 203 | goto end 204 | ) 205 | 206 | if "%1" == "texinfo" ( 207 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 208 | if errorlevel 1 exit /b 1 209 | echo. 210 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 211 | goto end 212 | ) 213 | 214 | if "%1" == "gettext" ( 215 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 216 | if errorlevel 1 exit /b 1 217 | echo. 218 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 219 | goto end 220 | ) 221 | 222 | if "%1" == "changes" ( 223 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 224 | if errorlevel 1 exit /b 1 225 | echo. 226 | echo.The overview file is in %BUILDDIR%/changes. 227 | goto end 228 | ) 229 | 230 | if "%1" == "linkcheck" ( 231 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 232 | if errorlevel 1 exit /b 1 233 | echo. 234 | echo.Link check complete; look for any errors in the above output ^ 235 | or in %BUILDDIR%/linkcheck/output.txt. 236 | goto end 237 | ) 238 | 239 | if "%1" == "doctest" ( 240 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 241 | if errorlevel 1 exit /b 1 242 | echo. 
243 | echo.Testing of doctests in the sources finished, look at the ^ 244 | results in %BUILDDIR%/doctest/output.txt. 245 | goto end 246 | ) 247 | 248 | if "%1" == "coverage" ( 249 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage 250 | if errorlevel 1 exit /b 1 251 | echo. 252 | echo.Testing of coverage in the sources finished, look at the ^ 253 | results in %BUILDDIR%/coverage/python.txt. 254 | goto end 255 | ) 256 | 257 | if "%1" == "xml" ( 258 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 259 | if errorlevel 1 exit /b 1 260 | echo. 261 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 262 | goto end 263 | ) 264 | 265 | if "%1" == "pseudoxml" ( 266 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 267 | if errorlevel 1 exit /b 1 268 | echo. 269 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 270 | goto end 271 | ) 272 | 273 | if "%1" == "dummy" ( 274 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy 275 | if errorlevel 1 exit /b 1 276 | echo. 277 | echo.Build finished. Dummy builder generates no files. 278 | goto end 279 | ) 280 | 281 | :end 282 | -------------------------------------------------------------------------------- /lint-requirements.txt: -------------------------------------------------------------------------------- 1 | black==24.4.2 2 | docutils==0.21.2 3 | flake8==7.1.1 4 | pygments==2.18.0 5 | setuptools; python_version >= "3.12" 6 | -------------------------------------------------------------------------------- /mypy-requirements.txt: -------------------------------------------------------------------------------- 1 | -r test-requirements.txt 2 | mypy==1.8.0 3 | -------------------------------------------------------------------------------- /pymemcache/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "4.0.0" 2 | 3 | from pymemcache.client.base import Client # noqa 4 | from pymemcache.client.base import PooledClient # noqa 5 | from pymemcache.client.hash import HashClient # noqa 6 | from pymemcache.client.base import KeepaliveOpts # noqa 7 | 8 | from pymemcache.exceptions import MemcacheError # noqa 9 | from pymemcache.exceptions import MemcacheClientError # noqa 10 | from pymemcache.exceptions import MemcacheUnknownCommandError # noqa 11 | from pymemcache.exceptions import MemcacheIllegalInputError # noqa 12 | from pymemcache.exceptions import MemcacheServerError # noqa 13 | from pymemcache.exceptions import MemcacheUnknownError # noqa 14 | from pymemcache.exceptions import MemcacheUnexpectedCloseError # noqa 15 | -------------------------------------------------------------------------------- /pymemcache/client/__init__.py: -------------------------------------------------------------------------------- 1 | # API Backwards compatibility 2 | 3 | from pymemcache.client.base import Client # noqa 4 | from pymemcache.client.base import PooledClient # noqa 5 | from pymemcache.client.hash import HashClient # noqa 6 | from pymemcache.client.retrying import RetryingClient # noqa 7 | 8 | from pymemcache.exceptions import MemcacheError # noqa 9 | from pymemcache.exceptions import MemcacheClientError # noqa 10 | from pymemcache.exceptions import MemcacheUnknownCommandError # noqa 11 | from pymemcache.exceptions import MemcacheIllegalInputError # noqa 12 | from pymemcache.exceptions import MemcacheServerError # noqa 13 | from pymemcache.exceptions import MemcacheUnknownError # noqa 14 | from pymemcache.exceptions import 
MemcacheUnexpectedCloseError # noqa 15 | -------------------------------------------------------------------------------- /pymemcache/client/ext/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinterest/pymemcache/bd43018c1e6c04681987c00a8bfcad3d4cf9ffb8/pymemcache/client/ext/__init__.py -------------------------------------------------------------------------------- /pymemcache/client/ext/aws_ec_client.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import operator 3 | import socket 4 | import re 5 | import time 6 | 7 | from pymemcache import MemcacheUnknownCommandError 8 | from pymemcache.client import Client 9 | from pymemcache.client.base import normalize_server_spec 10 | from pymemcache.client.hash import HashClient 11 | from pymemcache.client.rendezvous import RendezvousHash 12 | 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | _RE_AWS_ENDPOINT = re.compile( 17 | r"^(?:(?:[\w\d-]{0,61}[\w\d]\.)+[\w]{1,6}|\[(?:[\d]{1,3}\.){3}[\d]{1,3}\])\:\d{1,5}$" 18 | ) 19 | 20 | 21 | class AWSElastiCacheHashClient(HashClient): 22 | """ 23 | This class is a subclass of HashClient and represents a client for interacting with an AWS ElastiCache cluster 24 | using a hash-based algorithm for key distribution. 25 | 26 | *Connection * 27 | 28 | Supports version 1.4.14 or higher 29 | 30 | Example: 31 | >>> client = AWSElastiCacheServerlessClient('cluster.abcxyz.cfg.use1.cache.amazonaws.com') 32 | """ 33 | 34 | def __init__( 35 | self, 36 | cfg_node: object, 37 | hasher: object = RendezvousHash, 38 | serde: object = None, 39 | serializer: object = None, 40 | deserializer: object = None, 41 | connect_timeout: object = None, 42 | timeout: object = None, 43 | no_delay: object = False, 44 | socket_module: object = socket, 45 | socket_keepalive: object = None, 46 | key_prefix: object = b"", 47 | max_pool_size: object = None, 48 | pool_idle_timeout: object = 0, 49 | lock_generator: object = None, 50 | retry_attempts: object = 2, 51 | retry_timeout: object = 1, 52 | dead_timeout: object = 60, 53 | use_pooling: object = False, 54 | ignore_exc: object = False, 55 | allow_unicode_keys: object = False, 56 | default_noreply: object = True, 57 | encoding: object = "ascii", 58 | tls_context: object = None, 59 | use_vpc: object = True, 60 | ) -> object: 61 | """ 62 | Constructor. 63 | 64 | Args: 65 | cfg_node: formatted string containing endpoint and port of the 66 | ElastiCache cluster endpoint. Ex.: 67 | `test-cluster.2os1zk.cfg.use1.cache.amazonaws.com:11211` 68 | serde: optional serializer object, see notes in the class docs. 69 | serializer: deprecated serialization function 70 | deserializer: deprecated deserialization function 71 | connect_timeout: optional float, seconds to wait for a connection to 72 | the memcached server. Defaults to "forever" (uses the underlying 73 | default socket timeout, which can be very long). 74 | timeout: optional float, seconds to wait for send or recv calls on 75 | the socket connected to memcached. Defaults to "forever" (uses the 76 | underlying default socket timeout, which can be very long). 77 | no_delay: optional bool, set the TCP_NODELAY flag, which may help 78 | with performance in some cases. Defaults to False. 79 | ignore_exc: optional bool, True to cause the "get", "gets", 80 | "get_many" and "gets_many" calls to treat any errors as cache 81 | misses. Defaults to False. 82 | socket_module: socket module to use, e.g. 
gevent.socket. Defaults to 83 | the standard library's socket module. 84 | socket_keepalive: Activate the socket keepalive feature by passing 85 | a KeepaliveOpts structure in this parameter. Disabled by default 86 | (None). This feature is only supported on Linux platforms. 87 | key_prefix: Prefix of key. You can use this as namespace. Defaults 88 | to b''. 89 | default_noreply: bool, the default value for 'noreply' as passed to 90 | store commands (except from cas, incr, and decr, which default to 91 | False). 92 | allow_unicode_keys: bool, support unicode (utf8) keys 93 | encoding: optional str, controls data encoding (defaults to 'ascii'). 94 | use_vpc: optional bool, if set False (defaults to True), the client 95 | will use FQDN to connect to nodes instead of IP addresses. See 96 | AWS Docs for extra info 97 | https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/ClientConfig.DNS.html 98 | 99 | Notes: 100 | The constructor does not make a connection to memcached. The first 101 | call to a method on the object will do that. 102 | """ 103 | if not (_RE_AWS_ENDPOINT.fullmatch(cfg_node) and isinstance(cfg_node, str)): 104 | raise ValueError("Invalid AWS ElastiCache endpoint value '%s'" % cfg_node) 105 | 106 | self._cfg_node = cfg_node 107 | self.clients = {} 108 | self.retry_attempts = retry_attempts 109 | self.retry_timeout = retry_timeout 110 | self.dead_timeout = dead_timeout 111 | self.use_pooling = use_pooling 112 | self.key_prefix = key_prefix 113 | self.ignore_exc = ignore_exc 114 | self.allow_unicode_keys = allow_unicode_keys 115 | self._failed_clients = {} 116 | self._dead_clients = {} 117 | self._last_dead_check_time = time.time() 118 | 119 | self.hasher = hasher() 120 | 121 | self.default_kwargs = { 122 | "connect_timeout": connect_timeout, 123 | "timeout": timeout, 124 | "no_delay": no_delay, 125 | "socket_module": socket_module, 126 | "socket_keepalive": socket_keepalive, 127 | "key_prefix": key_prefix, 128 | "serde": serde, 129 | "serializer": serializer, 130 | "deserializer": deserializer, 131 | "allow_unicode_keys": allow_unicode_keys, 132 | "default_noreply": default_noreply, 133 | "encoding": encoding, 134 | "tls_context": tls_context, 135 | } 136 | 137 | if use_pooling is True: 138 | self.default_kwargs.update( 139 | { 140 | "max_pool_size": max_pool_size, 141 | "pool_idle_timeout": pool_idle_timeout, 142 | "lock_generator": lock_generator, 143 | } 144 | ) 145 | 146 | # server config returns as `[fqdn, ip, port]` if it's VPC installation you need to use ip 147 | self._use_vpc = int(use_vpc) 148 | 149 | self.reconfigure_nodes() 150 | 151 | self.encoding = encoding 152 | self.tls_context = tls_context 153 | 154 | def reconfigure_nodes(self): 155 | """ 156 | Reconfigures the nodes in the server cluster based on the provided configuration node. 157 | 158 | May useful on error handling during cluster scale down or scale up 159 | """ 160 | old_clients = self.clients.copy() 161 | self.clients.clear() 162 | 163 | for server in self._get_nodes_list(): 164 | self.add_server(normalize_server_spec(server)) 165 | 166 | for client in old_clients.values(): 167 | client.close() 168 | 169 | def _get_nodes_list(self) -> list[tuple[str, int]]: 170 | """ 171 | Get the list of nodes from the cluster configuration. 172 | 173 | Returns: 174 | A list of tuples containing the address and port of each node in the cluster. 175 | Each tuple has the format (address: str, port: int). 
176 | """ 177 | addr, port = self._cfg_node.rsplit(":", maxsplit=1) 178 | client = Client((addr, port), **self.default_kwargs) 179 | 180 | # https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/AutoDiscovery.AddingToYourClientLibrary.html 181 | try: 182 | *_, config_line = client.raw_command( 183 | b"config get cluster", 184 | end_tokens=b"\n\r\nEND\r\n", 185 | ).splitlines() 186 | except MemcacheUnknownCommandError: 187 | logger.exception( 188 | "Can't retrieve cluster configuration from '%s:%s' " 189 | "Seems like it is ElastiCache Serverless or even isn't ElastiCache at all.", 190 | client.server, 191 | ) 192 | finally: 193 | client.close() 194 | 195 | servers = [ 196 | (server[self._use_vpc], server[2]) 197 | for server in map( 198 | operator.methodcaller("split", "|"), 199 | config_line.decode().split(" "), 200 | ) 201 | ] 202 | 203 | logger.debug("Got the next nodes from cluster config: %s", servers) 204 | 205 | return servers 206 | -------------------------------------------------------------------------------- /pymemcache/client/hash.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import socket 3 | import time 4 | import logging 5 | 6 | from pymemcache.client.base import ( 7 | Client, 8 | PooledClient, 9 | check_key_helper, 10 | normalize_server_spec, 11 | ) 12 | from pymemcache.client.rendezvous import RendezvousHash 13 | from pymemcache.exceptions import MemcacheError 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | class HashClient: 19 | """ 20 | A client for communicating with a cluster of memcached servers 21 | """ 22 | 23 | #: :class:`Client` class used to create new clients 24 | client_class = Client 25 | 26 | def __init__( 27 | self, 28 | servers, 29 | hasher=RendezvousHash, 30 | serde=None, 31 | serializer=None, 32 | deserializer=None, 33 | connect_timeout=None, 34 | timeout=None, 35 | no_delay=False, 36 | socket_module=socket, 37 | socket_keepalive=None, 38 | key_prefix=b"", 39 | max_pool_size=None, 40 | pool_idle_timeout=0, 41 | lock_generator=None, 42 | retry_attempts=2, 43 | retry_timeout=1, 44 | dead_timeout=60, 45 | use_pooling=False, 46 | ignore_exc=False, 47 | allow_unicode_keys=False, 48 | default_noreply=True, 49 | encoding="ascii", 50 | tls_context=None, 51 | ): 52 | """ 53 | Constructor. 54 | 55 | Args: 56 | servers: list() of tuple(hostname, port) or string containing a UNIX 57 | socket path. 58 | hasher: optional class three functions ``get_node``, ``add_node``, 59 | and ``remove_node`` 60 | defaults to Rendezvous (HRW) hash. 61 | 62 | use_pooling: use py:class:`.PooledClient` as the default underlying 63 | class. ``max_pool_size`` and ``lock_generator`` can 64 | be used with this. default: False 65 | 66 | retry_attempts: Amount of times a client should be tried before it 67 | is marked dead and removed from the pool. 68 | retry_timeout (float): Time in seconds that should pass between retry 69 | attempts. 70 | dead_timeout (float): Time in seconds before attempting to add a node 71 | back in the pool. 72 | encoding: optional str, controls data encoding (defaults to 'ascii'). 73 | 74 | Further arguments are interpreted as for :py:class:`.Client` 75 | constructor. 
76 | """ 77 | self.clients = {} 78 | self.retry_attempts = retry_attempts 79 | self.retry_timeout = retry_timeout 80 | self.dead_timeout = dead_timeout 81 | self.use_pooling = use_pooling 82 | self.key_prefix = key_prefix 83 | self.ignore_exc = ignore_exc 84 | self.allow_unicode_keys = allow_unicode_keys 85 | self._failed_clients = {} 86 | self._dead_clients = {} 87 | self._last_dead_check_time = time.time() 88 | 89 | self.hasher = hasher() 90 | 91 | self.default_kwargs = { 92 | "connect_timeout": connect_timeout, 93 | "timeout": timeout, 94 | "no_delay": no_delay, 95 | "socket_module": socket_module, 96 | "socket_keepalive": socket_keepalive, 97 | "key_prefix": key_prefix, 98 | "serde": serde, 99 | "serializer": serializer, 100 | "deserializer": deserializer, 101 | "allow_unicode_keys": allow_unicode_keys, 102 | "default_noreply": default_noreply, 103 | "encoding": encoding, 104 | "tls_context": tls_context, 105 | } 106 | 107 | if use_pooling is True: 108 | self.default_kwargs.update( 109 | { 110 | "max_pool_size": max_pool_size, 111 | "pool_idle_timeout": pool_idle_timeout, 112 | "lock_generator": lock_generator, 113 | } 114 | ) 115 | 116 | for server in servers: 117 | self.add_server(normalize_server_spec(server)) 118 | self.encoding = encoding 119 | self.tls_context = tls_context 120 | 121 | def _make_client_key(self, server): 122 | if isinstance(server, (list, tuple)) and len(server) == 2: 123 | return "%s:%s" % server 124 | return server 125 | 126 | def add_server(self, server, port=None) -> None: 127 | # To maintain backward compatibility, if a port is provided, assume 128 | # that server wasn't provided as a (host, port) tuple. 129 | if port is not None: 130 | if not isinstance(server, str): 131 | raise TypeError("Server must be a string when passing port.") 132 | server = (server, port) 133 | 134 | _class = PooledClient if self.use_pooling else self.client_class 135 | client = _class(server, **self.default_kwargs) 136 | if self.use_pooling: 137 | client.client_class = self.client_class 138 | 139 | key = self._make_client_key(server) 140 | self.clients[key] = client 141 | self.hasher.add_node(key) 142 | 143 | def remove_server(self, server, port=None) -> None: 144 | # To maintain backward compatibility, if a port is provided, assume 145 | # that server wasn't provided as a (host, port) tuple. 
146 | if port is not None: 147 | if not isinstance(server, str): 148 | raise TypeError("Server must be a string when passing port.") 149 | server = (server, port) 150 | 151 | key = self._make_client_key(server) 152 | dead_time = time.time() 153 | self._failed_clients.pop(server) 154 | self._dead_clients[server] = dead_time 155 | self.hasher.remove_node(key) 156 | 157 | def _retry_dead(self) -> None: 158 | current_time = time.time() 159 | ldc = self._last_dead_check_time 160 | # We have reached the retry timeout 161 | if current_time - ldc > self.dead_timeout: 162 | candidates = [] 163 | for server, dead_time in self._dead_clients.items(): 164 | if current_time - dead_time > self.dead_timeout: 165 | candidates.append(server) 166 | for server in candidates: 167 | logger.debug("bringing server back into rotation %s", server) 168 | self.add_server(server) 169 | del self._dead_clients[server] 170 | self._last_dead_check_time = current_time 171 | 172 | def _get_client(self, key): 173 | # If key is tuple use first item as server key 174 | if isinstance(key, tuple) and len(key) == 2: 175 | server_key, key = key 176 | else: 177 | server_key = key 178 | 179 | check_key_helper(server_key, self.allow_unicode_keys, self.key_prefix) 180 | if self._dead_clients: 181 | self._retry_dead() 182 | 183 | server = self.hasher.get_node(server_key) 184 | # We've ran out of servers to try 185 | if server is None: 186 | if self.ignore_exc is True: 187 | return None, key 188 | raise MemcacheError("All servers seem to be down right now") 189 | 190 | return self.clients[server], key 191 | 192 | def _safely_run_func(self, client, func, default_val, *args, **kwargs): 193 | try: 194 | if client.server in self._failed_clients: 195 | # This server is currently failing, lets check if it is in 196 | # retry or marked as dead 197 | failed_metadata = self._failed_clients[client.server] 198 | 199 | # we haven't tried our max amount yet, if it has been enough 200 | # time lets just retry using it 201 | if failed_metadata["attempts"] < self.retry_attempts: 202 | failed_time = failed_metadata["failed_time"] 203 | if time.time() - failed_time > self.retry_timeout: 204 | logger.debug("retrying failed server: %s", client.server) 205 | result = func(*args, **kwargs) 206 | # we were successful, lets remove it from the failed 207 | # clients 208 | self._failed_clients.pop(client.server) 209 | return result 210 | return default_val 211 | else: 212 | # We've reached our max retry attempts, we need to mark 213 | # the sever as dead 214 | logger.debug("marking server as dead: %s", client.server) 215 | self.remove_server(client.server) 216 | 217 | result = func(*args, **kwargs) 218 | return result 219 | 220 | # Connecting to the server fail, we should enter 221 | # retry mode 222 | except OSError: 223 | self._mark_failed_server(client.server) 224 | 225 | # if we haven't enabled ignore_exc, don't move on gracefully, just 226 | # raise the exception 227 | if not self.ignore_exc: 228 | raise 229 | 230 | return default_val 231 | except Exception: 232 | # any exceptions that aren't socket.error we need to handle 233 | # gracefully as well 234 | if not self.ignore_exc: 235 | raise 236 | 237 | return default_val 238 | 239 | def _safely_run_set_many(self, client, values, *args, **kwargs): 240 | failed = [] 241 | succeeded = [] 242 | try: 243 | if client.server in self._failed_clients: 244 | # This server is currently failing, lets check if it is in 245 | # retry or marked as dead 246 | failed_metadata = self._failed_clients[client.server] 247 | 248 
| # we haven't tried our max amount yet, if it has been enough 249 | # time lets just retry using it 250 | if failed_metadata["attempts"] < self.retry_attempts: 251 | failed_time = failed_metadata["failed_time"] 252 | if time.time() - failed_time > self.retry_timeout: 253 | logger.debug("retrying failed server: %s", client.server) 254 | succeeded, failed, err = self._set_many( 255 | client, values, *args, **kwargs 256 | ) 257 | if err is not None: 258 | raise err 259 | # we were successful, lets remove it from the failed 260 | # clients 261 | self._failed_clients.pop(client.server) 262 | return failed 263 | return values.keys() 264 | else: 265 | # We've reached our max retry attempts, we need to mark 266 | # the sever as dead 267 | logger.debug("marking server as dead: %s", client.server) 268 | self.remove_server(client.server) 269 | 270 | succeeded, failed, err = self._set_many(client, values, *args, **kwargs) 271 | if err is not None: 272 | raise err 273 | 274 | return failed 275 | 276 | # Connecting to the server fail, we should enter 277 | # retry mode 278 | except OSError: 279 | self._mark_failed_server(client.server) 280 | 281 | # if we haven't enabled ignore_exc, don't move on gracefully, just 282 | # raise the exception 283 | if not self.ignore_exc: 284 | raise 285 | 286 | return list(set(values.keys()) - set(succeeded)) 287 | except Exception: 288 | # any exceptions that aren't socket.error we need to handle 289 | # gracefully as well 290 | if not self.ignore_exc: 291 | raise 292 | 293 | return list(set(values.keys()) - set(succeeded)) 294 | 295 | def _mark_failed_server(self, server): 296 | # This client has never failed, lets mark it for failure 297 | if server not in self._failed_clients and self.retry_attempts > 0: 298 | self._failed_clients[server] = { 299 | "failed_time": time.time(), 300 | "attempts": 0, 301 | } 302 | # We aren't allowing any retries, we should mark the server as 303 | # dead immediately 304 | elif server not in self._failed_clients and self.retry_attempts <= 0: 305 | self._failed_clients[server] = { 306 | "failed_time": time.time(), 307 | "attempts": 0, 308 | } 309 | logger.debug("marking server as dead %s", server) 310 | self.remove_server(server) 311 | # This client has failed previously, we need to update the metadata 312 | # to reflect that we have attempted it again 313 | else: 314 | failed_metadata = self._failed_clients[server] 315 | failed_metadata["attempts"] += 1 316 | failed_metadata["failed_time"] = time.time() 317 | self._failed_clients[server] = failed_metadata 318 | 319 | def _run_cmd(self, cmd, key, default_val, *args, **kwargs): 320 | client, key = self._get_client(key) 321 | 322 | if client is None: 323 | return default_val 324 | 325 | func = getattr(client, cmd) 326 | args = list(args) 327 | args.insert(0, key) 328 | return self._safely_run_func(client, func, default_val, *args, **kwargs) 329 | 330 | def _set_many(self, client, values, *args, **kwargs): 331 | failed = [] 332 | succeeded = [] 333 | 334 | try: 335 | failed = client.set_many(values, *args, **kwargs) 336 | except Exception as e: 337 | if not self.ignore_exc: 338 | return succeeded, failed, e 339 | 340 | succeeded = [key for key in values if key not in failed] 341 | return succeeded, failed, None 342 | 343 | def close(self): 344 | for client in self.clients.values(): 345 | self._safely_run_func(client, client.close, False) 346 | 347 | disconnect_all = close 348 | 349 | def set(self, key, *args, **kwargs): 350 | return self._run_cmd("set", key, False, *args, **kwargs) 351 | 
352 | def get(self, key, default=None, **kwargs): 353 | return self._run_cmd("get", key, default, default=default, **kwargs) 354 | 355 | def gat(self, key, default=None, **kwargs): 356 | return self._run_cmd("gat", key, default, default=default, **kwargs) 357 | 358 | def gats(self, key, default=None, **kwargs): 359 | return self._run_cmd("gats", key, default, default=default, **kwargs) 360 | 361 | def incr(self, key, *args, **kwargs): 362 | return self._run_cmd("incr", key, None, *args, **kwargs) 363 | 364 | def decr(self, key, *args, **kwargs): 365 | return self._run_cmd("decr", key, None, *args, **kwargs) 366 | 367 | def set_many(self, values, *args, **kwargs): 368 | client_batches = collections.defaultdict(dict) 369 | failed = [] 370 | 371 | for key, value in values.items(): 372 | client, key = self._get_client(key) 373 | 374 | if client is None: 375 | failed.append(key) 376 | continue 377 | 378 | client_batches[client.server][key] = value 379 | 380 | for server, values in client_batches.items(): 381 | client = self.clients[self._make_client_key(server)] 382 | failed += self._safely_run_set_many(client, values, *args, **kwargs) 383 | 384 | return failed 385 | 386 | set_multi = set_many 387 | 388 | def get_many(self, keys, gets=False, *args, **kwargs): 389 | client_batches = collections.defaultdict(list) 390 | end = {} 391 | 392 | for key in keys: 393 | client, key = self._get_client(key) 394 | 395 | if client is None: 396 | continue 397 | 398 | client_batches[client.server].append(key) 399 | 400 | for server, keys in client_batches.items(): 401 | client = self.clients[self._make_client_key(server)] 402 | new_args = list(args) 403 | new_args.insert(0, keys) 404 | 405 | if gets: 406 | get_func = client.gets_many 407 | else: 408 | get_func = client.get_many 409 | 410 | result = self._safely_run_func(client, get_func, {}, *new_args, **kwargs) 411 | end.update(result) 412 | 413 | return end 414 | 415 | get_multi = get_many 416 | 417 | def gets(self, key, *args, **kwargs): 418 | return self._run_cmd("gets", key, None, *args, **kwargs) 419 | 420 | def gets_many(self, keys, *args, **kwargs): 421 | return self.get_many(keys, gets=True, *args, **kwargs) 422 | 423 | gets_multi = gets_many 424 | 425 | def add(self, key, *args, **kwargs): 426 | return self._run_cmd("add", key, False, *args, **kwargs) 427 | 428 | def prepend(self, key, *args, **kwargs): 429 | return self._run_cmd("prepend", key, False, *args, **kwargs) 430 | 431 | def append(self, key, *args, **kwargs): 432 | return self._run_cmd("append", key, False, *args, **kwargs) 433 | 434 | def delete(self, key, *args, **kwargs): 435 | return self._run_cmd("delete", key, False, *args, **kwargs) 436 | 437 | def delete_many(self, keys, *args, **kwargs) -> bool: 438 | for key in keys: 439 | self._run_cmd("delete", key, False, *args, **kwargs) 440 | return True 441 | 442 | delete_multi = delete_many 443 | 444 | def cas(self, key, *args, **kwargs): 445 | return self._run_cmd("cas", key, False, *args, **kwargs) 446 | 447 | def replace(self, key, *args, **kwargs): 448 | return self._run_cmd("replace", key, False, *args, **kwargs) 449 | 450 | def touch(self, key, *args, **kwargs): 451 | return self._run_cmd("touch", key, False, *args, **kwargs) 452 | 453 | def stats(self, *args, **kwargs): 454 | result = list() 455 | for key, client in self.clients.items(): 456 | result.append( 457 | ( 458 | key, 459 | self._safely_run_func(client, client.stats, False, *args, **kwargs), 460 | ) 461 | ) 462 | return result 463 | 464 | def flush_all(self, *args, 
**kwargs) -> None: 465 | for client in self.clients.values(): 466 | self._safely_run_func(client, client.flush_all, False, *args, **kwargs) 467 | 468 | def quit(self) -> None: 469 | for client in self.clients.values(): 470 | self._safely_run_func(client, client.quit, False) 471 | -------------------------------------------------------------------------------- /pymemcache/client/murmur3.py: -------------------------------------------------------------------------------- 1 | def murmur3_32(data, seed=0): 2 | """MurmurHash3 was written by Austin Appleby, and is placed in the 3 | public domain. The author hereby disclaims copyright to this source 4 | code.""" 5 | 6 | c1 = 0xCC9E2D51 7 | c2 = 0x1B873593 8 | 9 | length = len(data) 10 | h1 = seed 11 | roundedEnd = length & 0xFFFFFFFC # round down to 4 byte block 12 | for i in range(0, roundedEnd, 4): 13 | # little endian load order 14 | k1 = ( 15 | (ord(data[i]) & 0xFF) 16 | | ((ord(data[i + 1]) & 0xFF) << 8) 17 | | ((ord(data[i + 2]) & 0xFF) << 16) 18 | | (ord(data[i + 3]) << 24) 19 | ) 20 | k1 *= c1 21 | k1 = (k1 << 15) | ((k1 & 0xFFFFFFFF) >> 17) # ROTL32(k1,15) 22 | k1 *= c2 23 | 24 | h1 ^= k1 25 | h1 = (h1 << 13) | ((h1 & 0xFFFFFFFF) >> 19) # ROTL32(h1,13) 26 | h1 = h1 * 5 + 0xE6546B64 27 | 28 | # tail 29 | k1 = 0 30 | 31 | val = length & 0x03 32 | if val == 3: 33 | k1 = (ord(data[roundedEnd + 2]) & 0xFF) << 16 34 | # fallthrough 35 | if val in [2, 3]: 36 | k1 |= (ord(data[roundedEnd + 1]) & 0xFF) << 8 37 | # fallthrough 38 | if val in [1, 2, 3]: 39 | k1 |= ord(data[roundedEnd]) & 0xFF 40 | k1 *= c1 41 | k1 = (k1 << 15) | ((k1 & 0xFFFFFFFF) >> 17) # ROTL32(k1,15) 42 | k1 *= c2 43 | h1 ^= k1 44 | 45 | # finalization 46 | h1 ^= length 47 | 48 | # fmix(h1) 49 | h1 ^= (h1 & 0xFFFFFFFF) >> 16 50 | h1 *= 0x85EBCA6B 51 | h1 ^= (h1 & 0xFFFFFFFF) >> 13 52 | h1 *= 0xC2B2AE35 53 | h1 ^= (h1 & 0xFFFFFFFF) >> 16 54 | 55 | return h1 & 0xFFFFFFFF 56 | -------------------------------------------------------------------------------- /pymemcache/client/rendezvous.py: -------------------------------------------------------------------------------- 1 | from pymemcache.client.murmur3 import murmur3_32 2 | 3 | 4 | class RendezvousHash: 5 | """ 6 | Implements the Highest Random Weight (HRW) hashing algorithm most 7 | commonly referred to as rendezvous hashing. 8 | 9 | Originally developed as part of python-clandestined. 10 | 11 | Copyright (c) 2014 Ernest W. Durbin III 12 | """ 13 | 14 | def __init__(self, nodes=None, seed=0, hash_function=murmur3_32): 15 | """ 16 | Constructor. 
17 | """ 18 | self.nodes = [] 19 | self.seed = seed 20 | if nodes is not None: 21 | self.nodes = nodes 22 | self.hash_function = lambda x: hash_function(x, seed) 23 | 24 | def add_node(self, node): 25 | if node not in self.nodes: 26 | self.nodes.append(node) 27 | 28 | def remove_node(self, node): 29 | if node in self.nodes: 30 | self.nodes.remove(node) 31 | else: 32 | raise ValueError("No such node %s to remove" % (node)) 33 | 34 | def get_node(self, key): 35 | high_score = -1 36 | winner = None 37 | 38 | for node in self.nodes: 39 | score = self.hash_function(f"{node}-{key}") 40 | 41 | if score > high_score: 42 | (high_score, winner) = (score, node) 43 | elif score == high_score: 44 | (high_score, winner) = (score, max(str(node), str(winner))) 45 | 46 | return winner 47 | -------------------------------------------------------------------------------- /pymemcache/client/retrying.py: -------------------------------------------------------------------------------- 1 | """ Module containing the RetryingClient wrapper class. """ 2 | 3 | from time import sleep 4 | 5 | 6 | def _ensure_tuple_argument(argument_name, argument_value): 7 | """ 8 | Helper function to ensure the given arguments are tuples of Exceptions (or 9 | subclasses), or can at least be converted to such. 10 | 11 | Args: 12 | argument_name: str, name of the argument we're checking, only used for 13 | raising meaningful exceptions. 14 | argument: any, the argument itself. 15 | 16 | Returns: 17 | tuple[Exception]: A tuple with the elements from the argument if they are 18 | valid. 19 | 20 | Exceptions: 21 | ValueError: If the argument was not None, tuple or Iterable. 22 | ValueError: If any of the elements of the argument is not a subclass of 23 | Exception. 24 | """ 25 | 26 | # Ensure the argument is a tuple, set or list. 27 | if argument_value is None: 28 | return tuple() 29 | elif not isinstance(argument_value, (tuple, set, list)): 30 | raise ValueError("%s must be either a tuple, a set or a list." % argument_name) 31 | 32 | # Convert the argument before checking contents. 33 | argument_tuple = tuple(argument_value) 34 | 35 | # Check that all the elements are actually inherited from Exception. 36 | # (Catchable) 37 | if not all([issubclass(arg, Exception) for arg in argument_tuple]): 38 | raise ValueError( 39 | "%s is only allowed to contain elements that are subclasses of " 40 | "Exception." % argument_name 41 | ) 42 | 43 | return argument_tuple 44 | 45 | 46 | class RetryingClient: 47 | """ 48 | Client that allows retrying calls for the other clients. 49 | """ 50 | 51 | def __init__( 52 | self, client, attempts=2, retry_delay=0, retry_for=None, do_not_retry_for=None 53 | ): 54 | """ 55 | Constructor for RetryingClient. 56 | 57 | Args: 58 | client: Client|PooledClient|HashClient, inner client to use for 59 | performing actual work. 60 | attempts: optional int, how many times to attempt an action before 61 | failing. Must be 1 or above. Defaults to 2. 62 | retry_delay: optional int|float, how many seconds to sleep between 63 | each attempt. 64 | Defaults to 0. 65 | 66 | retry_for: optional None|tuple|set|list, what exceptions to 67 | allow retries for. Will allow retries for all exceptions if None. 68 | Example: 69 | `(MemcacheClientError, MemcacheUnexpectedCloseError)` 70 | Accepts any class that is a subclass of Exception. 71 | Defaults to None. 72 | 73 | do_not_retry_for: optional None|tuple|set|list, what 74 | exceptions should be retried. Will not block retries for any 75 | Exception if None. 
76 | Example: 77 | `(IOError, MemcacheIllegalInputError)` 78 | Accepts any class that is a subclass of Exception. 79 | Defaults to None. 80 | 81 | Exceptions: 82 | ValueError: If `attempts` is not 1 or above. 83 | ValueError: If `retry_for` or `do_not_retry_for` is not None, tuple or 84 | Iterable. 85 | ValueError: If any of the elements of `retry_for` or 86 | `do_not_retry_for` is not a subclass of Exception. 87 | ValueError: If there is any overlap between `retry_for` and 88 | `do_not_retry_for`. 89 | """ 90 | 91 | if attempts < 1: 92 | raise ValueError( 93 | "`attempts` argument must be at least 1. " 94 | "Otherwise no attempts are made." 95 | ) 96 | 97 | self._client = client 98 | self._attempts = attempts 99 | self._retry_delay = retry_delay 100 | self._retry_for = _ensure_tuple_argument("retry_for", retry_for) 101 | self._do_not_retry_for = _ensure_tuple_argument( 102 | "do_not_retry_for", do_not_retry_for 103 | ) 104 | 105 | # Verify no overlap in the go/no-go exception collections. 106 | for exc_class in self._retry_for: 107 | if exc_class in self._do_not_retry_for: 108 | raise ValueError( 109 | 'Exception class "%s" was present in both `retry_for` ' 110 | "and `do_not_retry_for`. Any exception class is only " 111 | "allowed in a single argument." % repr(exc_class) 112 | ) 113 | 114 | # Take dir from the client to speed up future checks. 115 | self._client_dir = dir(self._client) 116 | 117 | def _retry(self, name, func, *args, **kwargs): 118 | """ 119 | Workhorse function, handles retry logic. 120 | 121 | Args: 122 | name: str, Name of the function called. 123 | func: callable, the function to retry. 124 | *args: args, array arguments to pass to the function. 125 | **kwargs: kwargs, keyword arguments to pass to the function. 126 | """ 127 | for attempt in range(self._attempts): 128 | try: 129 | result = func(*args, **kwargs) 130 | return result 131 | 132 | except Exception as exc: 133 | # Raise the exception to caller if either is met: 134 | # - We've used the last attempt. 135 | # - self._retry_for is set, and we do not match. 136 | # - self._do_not_retry_for is set, and we do match. 137 | # - name is not actually a member of the client class. 138 | if ( 139 | attempt >= self._attempts - 1 140 | or (self._retry_for and not isinstance(exc, self._retry_for)) 141 | or ( 142 | self._do_not_retry_for 143 | and isinstance(exc, self._do_not_retry_for) 144 | ) 145 | or name not in self._client_dir 146 | ): 147 | raise exc 148 | 149 | # Sleep and try again. 150 | sleep(self._retry_delay) 151 | 152 | # This is the real magic soup of the class, we catch anything that isn't 153 | # strictly defined for ourselves and pass it on to whatever client we've 154 | # been given. 155 | def __getattr__(self, name): 156 | return lambda *args, **kwargs: self._retry( 157 | name, self._client.__getattribute__(name), *args, **kwargs 158 | ) 159 | 160 | # We implement these explicitly because they're "magic" functions and won't 161 | # get passed on by __getattr__. 162 | 163 | def __dir__(self): 164 | return self._client_dir 165 | 166 | # These magics are copied from the base client. 
167 | def __setitem__(self, key, value): 168 | self.set(key, value, noreply=True) 169 | 170 | def __getitem__(self, key): 171 | value = self.get(key) 172 | if value is None: 173 | raise KeyError 174 | return value 175 | 176 | def __delitem__(self, key): 177 | self.delete(key, noreply=True) 178 | -------------------------------------------------------------------------------- /pymemcache/exceptions.py: -------------------------------------------------------------------------------- 1 | class MemcacheError(Exception): 2 | "Base exception class" 3 | pass 4 | 5 | 6 | class MemcacheClientError(MemcacheError): 7 | """Raised when memcached fails to parse the arguments to a request, likely 8 | due to a malformed key and/or value, a bug in this library, or a version 9 | mismatch with memcached.""" 10 | 11 | pass 12 | 13 | 14 | class MemcacheUnknownCommandError(MemcacheClientError): 15 | """Raised when memcached fails to parse a request, likely due to a bug in 16 | this library or a version mismatch with memcached.""" 17 | 18 | pass 19 | 20 | 21 | class MemcacheIllegalInputError(MemcacheClientError): 22 | """Raised when a key or value is not legal for Memcache (see the class docs 23 | for Client for more details).""" 24 | 25 | pass 26 | 27 | 28 | class MemcacheServerError(MemcacheError): 29 | """Raised when memcached reports a failure while processing a request, 30 | likely due to a bug or transient issue in memcached.""" 31 | 32 | pass 33 | 34 | 35 | class MemcacheUnknownError(MemcacheError): 36 | """Raised when this library receives a response from memcached that it 37 | cannot parse, likely due to a bug in this library or a version mismatch 38 | with memcached.""" 39 | 40 | pass 41 | 42 | 43 | class MemcacheUnexpectedCloseError(MemcacheServerError): 44 | "Raised when the connection with memcached closes unexpectedly." 45 | pass 46 | -------------------------------------------------------------------------------- /pymemcache/fallback.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Pinterest.com 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """ 16 | A client for falling back to older memcached servers when performing reads. 17 | 18 | It is sometimes necessary to deploy memcached on new servers, or with a 19 | different configuration. In these cases, it is undesirable to start up an 20 | empty memcached server and point traffic to it, since the cache will be cold, 21 | and the backing store will have a large increase in traffic. 22 | 23 | This class attempts to solve that problem by providing an interface identical 24 | to the Client interface, but which can fall back to older memcached servers 25 | when reads to the primary server fail. The approach for upgrading memcached 26 | servers or configuration then becomes: 27 | 28 | 1. Deploy a new host (or fleet) with memcached, possibly with a new 29 | configuration. 30 | 2. 
From your application servers, use FallbackClient to write and read from 31 | the new cluster, and to read from the old cluster when there is a miss in 32 | the new cluster. 33 | 3. Wait until the new cache is warm enough to support the load. 34 | 4. Switch from FallbackClient to a regular Client library for doing all 35 | reads and writes to the new cluster. 36 | 5. Take down the old cluster. 37 | 38 | Best Practices: 39 | --------------- 40 | - Make sure that the old client has "ignore_exc" set to True, so that it 41 | treats failures like cache misses. That will allow you to take down the 42 | old cluster before you switch away from FallbackClient. 43 | """ 44 | 45 | 46 | class FallbackClient: 47 | def __init__(self, caches): 48 | assert len(caches) > 0 49 | self.caches = caches 50 | 51 | def close(self): 52 | "Close each of the memcached clients" 53 | for cache in self.caches: 54 | cache.close() 55 | 56 | def set(self, key, value, expire=0, noreply=True): 57 | self.caches[0].set(key, value, expire, noreply) 58 | 59 | def add(self, key, value, expire=0, noreply=True): 60 | self.caches[0].add(key, value, expire, noreply) 61 | 62 | def replace(self, key, value, expire=0, noreply=True): 63 | self.caches[0].replace(key, value, expire, noreply) 64 | 65 | def append(self, key, value, expire=0, noreply=True): 66 | self.caches[0].append(key, value, expire, noreply) 67 | 68 | def prepend(self, key, value, expire=0, noreply=True): 69 | self.caches[0].prepend(key, value, expire, noreply) 70 | 71 | def cas(self, key, value, cas, expire=0, noreply=True): 72 | self.caches[0].cas(key, value, cas, expire, noreply) 73 | 74 | def get(self, key): 75 | for cache in self.caches: 76 | result = cache.get(key) 77 | if result is not None: 78 | return result 79 | return None 80 | 81 | def get_many(self, keys): 82 | for cache in self.caches: 83 | result = cache.get_many(keys) 84 | if result: 85 | return result 86 | return [] 87 | 88 | def gets(self, key): 89 | for cache in self.caches: 90 | result = cache.gets(key) 91 | if result is not None: 92 | return result 93 | return None 94 | 95 | def gets_many(self, keys): 96 | for cache in self.caches: 97 | result = cache.gets_many(keys) 98 | if result: 99 | return result 100 | return [] 101 | 102 | def delete(self, key, noreply=True): 103 | self.caches[0].delete(key, noreply) 104 | 105 | def incr(self, key, value, noreply=True): 106 | self.caches[0].incr(key, value, noreply) 107 | 108 | def decr(self, key, value, noreply=True): 109 | self.caches[0].decr(key, value, noreply) 110 | 111 | def touch(self, key, expire=0, noreply=True): 112 | self.caches[0].touch(key, expire, noreply) 113 | 114 | def stats(self): 115 | # TODO: ?? 116 | pass 117 | 118 | def flush_all(self, delay=0, noreply=True): 119 | self.caches[0].flush_all(delay, noreply) 120 | 121 | def quit(self): 122 | # TODO: ?? 123 | pass 124 | -------------------------------------------------------------------------------- /pymemcache/pool.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Yahoo.com 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
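Following the upgrade recipe in the fallback module's docstring, a sketch of wiring the two clusters together (host names are illustrative):

    from pymemcache.client.base import Client
    from pymemcache.fallback import FallbackClient

    new = Client(("new-memcached.internal", 11211))
    # ignore_exc=True makes failures on the old cluster look like cache misses,
    # so the old hosts can be retired before switching away from FallbackClient.
    old = Client(("old-memcached.internal", 11211), ignore_exc=True)

    client = FallbackClient([new, old])  # writes go to caches[0]; reads fall through
    client.set("session:42", b"payload")
    value = client.get("session:42")     # from the new cluster, else the old, else None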
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import collections 16 | import contextlib 17 | import threading 18 | import time 19 | from typing import Callable, Optional, TypeVar, Deque, Generic 20 | from collections.abc import Iterator 21 | 22 | 23 | T = TypeVar("T") 24 | 25 | 26 | class ObjectPool(Generic[T]): 27 | """A pool of objects that release/creates/destroys as needed.""" 28 | 29 | def __init__( 30 | self, 31 | obj_creator: Callable[[], T], 32 | after_remove: Optional[Callable] = None, 33 | max_size: Optional[int] = None, 34 | idle_timeout: int = 0, 35 | lock_generator: Optional[Callable] = None, 36 | ): 37 | self._used_objs: Deque[T] = collections.deque() 38 | self._free_objs: Deque[T] = collections.deque() 39 | self._obj_creator = obj_creator 40 | if lock_generator is None: 41 | self._lock = threading.Lock() 42 | else: 43 | self._lock = lock_generator() 44 | self._after_remove = after_remove 45 | max_size = max_size or 2**31 46 | if not isinstance(max_size, int) or max_size < 0: 47 | raise ValueError('"max_size" must be a positive integer') 48 | self.max_size = max_size 49 | self.idle_timeout = idle_timeout 50 | if idle_timeout: 51 | self._idle_clock = time.time 52 | else: 53 | self._idle_clock = float 54 | 55 | @property 56 | def used(self): 57 | return tuple(self._used_objs) 58 | 59 | @property 60 | def free(self): 61 | return tuple(self._free_objs) 62 | 63 | @contextlib.contextmanager 64 | def get_and_release(self, destroy_on_fail=False) -> Iterator[T]: 65 | obj = self.get() 66 | try: 67 | yield obj 68 | except Exception: 69 | if not destroy_on_fail: 70 | self.release(obj) 71 | else: 72 | self.destroy(obj) 73 | raise 74 | self.release(obj) 75 | 76 | def get(self): 77 | with self._lock: 78 | # Find a free object, removing any that have idled for too long. 79 | now = self._idle_clock() 80 | while self._free_objs: 81 | obj = self._free_objs.popleft() 82 | if now - obj._last_used <= self.idle_timeout: 83 | break 84 | 85 | if self._after_remove is not None: 86 | self._after_remove(obj) 87 | else: 88 | # No free objects, create a new one. 
89 | curr_count = len(self._used_objs) 90 | if curr_count >= self.max_size: 91 | raise RuntimeError( 92 | "Too many objects," " %s >= %s" % (curr_count, self.max_size) 93 | ) 94 | obj = self._obj_creator() 95 | 96 | self._used_objs.append(obj) 97 | obj._last_used = now 98 | return obj 99 | 100 | def destroy(self, obj, silent=True) -> None: 101 | was_dropped = False 102 | with self._lock: 103 | try: 104 | self._used_objs.remove(obj) 105 | was_dropped = True 106 | except ValueError: 107 | if not silent: 108 | raise 109 | if was_dropped and self._after_remove is not None: 110 | self._after_remove(obj) 111 | 112 | def release(self, obj, silent=True) -> None: 113 | with self._lock: 114 | try: 115 | self._used_objs.remove(obj) 116 | self._free_objs.append(obj) 117 | obj._last_used = self._idle_clock() 118 | except ValueError: 119 | if not silent: 120 | raise 121 | 122 | def clear(self) -> None: 123 | if self._after_remove is not None: 124 | needs_destroy: list[T] = [] 125 | with self._lock: 126 | needs_destroy.extend(self._used_objs) 127 | needs_destroy.extend(self._free_objs) 128 | self._free_objs.clear() 129 | self._used_objs.clear() 130 | for obj in needs_destroy: 131 | self._after_remove(obj) 132 | else: 133 | with self._lock: 134 | self._free_objs.clear() 135 | self._used_objs.clear() 136 | -------------------------------------------------------------------------------- /pymemcache/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinterest/pymemcache/bd43018c1e6c04681987c00a8bfcad3d4cf9ffb8/pymemcache/py.typed -------------------------------------------------------------------------------- /pymemcache/serde.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Pinterest.com 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
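A small sketch of using the ObjectPool above to pool pymemcache clients, roughly how PooledClient composes it (the pool size and timeout are illustrative):

    from pymemcache.client.base import Client
    from pymemcache.pool import ObjectPool

    pool = ObjectPool(
        obj_creator=lambda: Client(("localhost", 11211)),
        after_remove=lambda c: c.close(),  # run whenever a pooled client is destroyed
        max_size=4,                        # at most 4 live clients
        idle_timeout=30,                   # recycle clients idle for more than 30 s
    )

    with pool.get_and_release(destroy_on_fail=True) as client:
        client.set("key", b"value", noreply=False)

    pool.clear()  # destroy everything that is left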
14 | 15 | import logging 16 | import pickle 17 | import zlib 18 | from functools import partial 19 | from io import BytesIO 20 | 21 | FLAG_BYTES = 0 22 | FLAG_PICKLE = 1 << 0 23 | FLAG_INTEGER = 1 << 1 24 | FLAG_LONG = 1 << 2 25 | FLAG_COMPRESSED = 1 << 3 26 | FLAG_TEXT = 1 << 4 27 | 28 | # Pickle protocol version (highest available to runtime) 29 | # Warning with `0`: If somewhere in your value lies a slotted object, 30 | # ie defines `__slots__`, even if you do not include it in your pickleable 31 | # state via `__getstate__`, python will complain with something like: 32 | # TypeError: a class that defines __slots__ without defining __getstate__ 33 | # cannot be pickled 34 | DEFAULT_PICKLE_VERSION = pickle.HIGHEST_PROTOCOL 35 | 36 | 37 | def _python_memcache_serializer(key, value, pickle_version=None): 38 | flags = 0 39 | value_type = type(value) 40 | 41 | # Check against exact types so that subclasses of native types will be 42 | # restored as their native type 43 | if value_type is bytes: 44 | pass 45 | 46 | elif value_type is str: 47 | flags |= FLAG_TEXT 48 | value = value.encode("utf8") 49 | 50 | elif value_type is int: 51 | flags |= FLAG_INTEGER 52 | value = "%d" % value 53 | 54 | else: 55 | flags |= FLAG_PICKLE 56 | output = BytesIO() 57 | pickler = pickle.Pickler(output, pickle_version) 58 | pickler.dump(value) 59 | value = output.getvalue() 60 | 61 | return value, flags 62 | 63 | 64 | def get_python_memcache_serializer(pickle_version: int = DEFAULT_PICKLE_VERSION): 65 | """Return a serializer using a specific pickle version""" 66 | return partial(_python_memcache_serializer, pickle_version=pickle_version) 67 | 68 | 69 | python_memcache_serializer = get_python_memcache_serializer() 70 | 71 | 72 | def python_memcache_deserializer(key, value, flags): 73 | if flags == 0: 74 | return value 75 | 76 | elif flags & FLAG_TEXT: 77 | return value.decode("utf8") 78 | 79 | elif flags & FLAG_INTEGER: 80 | return int(value) 81 | 82 | elif flags & FLAG_LONG: 83 | return int(value) 84 | 85 | elif flags & FLAG_PICKLE: 86 | try: 87 | buf = BytesIO(value) 88 | unpickler = pickle.Unpickler(buf) 89 | return unpickler.load() 90 | except Exception: 91 | logging.info("Pickle error", exc_info=True) 92 | return None 93 | 94 | return value 95 | 96 | 97 | class PickleSerde: 98 | """ 99 | An object which implements the serialization/deserialization protocol for 100 | :py:class:`pymemcache.client.base.Client` and its descendants using the 101 | :mod:`pickle` module. 102 | 103 | Serialization and deserialization are implemented as methods of this class. 104 | To implement a custom serialization/deserialization method for pymemcache, 105 | you should implement the same interface as the one provided by this object 106 | -- :py:meth:`pymemcache.serde.PickleSerde.serialize` and 107 | :py:meth:`pymemcache.serde.PickleSerde.deserialize`. Then, 108 | pass your custom object to the pymemcache client object in place of 109 | `PickleSerde`. 
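Following the serialize/deserialize interface the PickleSerde docstring describes, a hedged sketch of a custom serde that stores values as JSON (the flag value is illustrative; any integer the application reserves works, as long as serialize and deserialize agree on it):

    import json

    class JsonSerde:
        FLAG_JSON = 1 << 5  # illustrative; chosen not to clash with the FLAG_* bits above

        def serialize(self, key, value):
            return json.dumps(value).encode("utf-8"), self.FLAG_JSON

        def deserialize(self, key, value, flags):
            if flags & self.FLAG_JSON:
                return json.loads(value.decode("utf-8"))
            return value

    # client = Client(("localhost", 11211), serde=JsonSerde())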
110 | 111 | For more details on the serialization protocol, see the class documentation 112 | for :py:class:`pymemcache.client.base.Client` 113 | """ 114 | 115 | def __init__(self, pickle_version: int = DEFAULT_PICKLE_VERSION) -> None: 116 | self._serialize_func = get_python_memcache_serializer(pickle_version) 117 | 118 | def serialize(self, key, value): 119 | return self._serialize_func(key, value) 120 | 121 | def deserialize(self, key, value, flags): 122 | return python_memcache_deserializer(key, value, flags) 123 | 124 | 125 | pickle_serde = PickleSerde() 126 | 127 | 128 | class CompressedSerde: 129 | """ 130 | An object which implements the serialization/deserialization protocol for 131 | :py:class:`pymemcache.client.base.Client` and its descendants with 132 | configurable compression. 133 | """ 134 | 135 | def __init__( 136 | self, 137 | compress=zlib.compress, 138 | decompress=zlib.decompress, 139 | serde=pickle_serde, 140 | # Discovered via the `test_optimal_compression_length` test. 141 | min_compress_len=400, 142 | ): 143 | self._serde = serde 144 | self._compress = compress 145 | self._decompress = decompress 146 | self._min_compress_len = min_compress_len 147 | 148 | def serialize(self, key, value): 149 | value, flags = self._serde.serialize(key, value) 150 | 151 | if len(value) > self._min_compress_len > 0: 152 | old_value = value 153 | value = self._compress(value) 154 | # Don't use the compressed value if our end result is actually 155 | # larger uncompressed. 156 | if len(old_value) < len(value): 157 | value = old_value 158 | else: 159 | flags |= FLAG_COMPRESSED 160 | 161 | return value, flags 162 | 163 | def deserialize(self, key, value, flags): 164 | if flags & FLAG_COMPRESSED: 165 | value = self._decompress(value) 166 | 167 | value = self._serde.deserialize(key, value, flags) 168 | return value 169 | 170 | 171 | compressed_serde = CompressedSerde() 172 | 173 | 174 | class LegacyWrappingSerde: 175 | """ 176 | This class defines how to wrap legacy de/serialization functions into a 177 | 'serde' object which implements '.serialize' and '.deserialize' methods. 178 | It is used automatically by pymemcache.client.base.Client when the 179 | 'serializer' or 'deserializer' arguments are given. 180 | 181 | The serializer_func and deserializer_func are expected to be None in the 182 | case that they are missing. 
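A brief sketch of plugging CompressedSerde into a client with a stronger zlib setting (the level and threshold are illustrative; this mirrors the configurations exercised in pymemcache/test/test_compression.py):

    import zlib
    from pymemcache.client.base import Client
    from pymemcache.serde import CompressedSerde

    serde = CompressedSerde(
        compress=lambda value: zlib.compress(value, 9),
        decompress=zlib.decompress,
        min_compress_len=400,  # values shorter than this stay uncompressed
    )
    client = Client(("localhost", 11211), serde=serde)
    client.set("big", "x" * 10000, noreply=False)  # large enough to be compressed
    assert client.get("big") == "x" * 10000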
183 | """ 184 | 185 | def __init__(self, serializer_func, deserializer_func) -> None: 186 | self.serialize = serializer_func or self._default_serialize 187 | self.deserialize = deserializer_func or self._default_deserialize 188 | 189 | def _default_serialize(self, key, value): 190 | return value, 0 191 | 192 | def _default_deserialize(self, key, value, flags): 193 | return value 194 | -------------------------------------------------------------------------------- /pymemcache/test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pinterest/pymemcache/bd43018c1e6c04681987c00a8bfcad3d4cf9ffb8/pymemcache/test/__init__.py -------------------------------------------------------------------------------- /pymemcache/test/certs/ca-root.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIGozCCBIugAwIBAgIJAM58RO9sXvoHMA0GCSqGSIb3DQEBCwUAMIGNMQswCQYD 3 | VQQGEwJDWjEaMBgGA1UECAwRSmlob21vcmF2c2t5IGtyYWoxDTALBgNVBAcMBEJy 4 | bm8xGzAZBgNVBAoMElNjb3JpYSBDb3Jwb3JhdGlvbjE2MDQGA1UEAwwtU2Nvcmlh 5 | IENvcnBvcmF0aW9uIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MB4XDTIwMDQw 6 | MzE0NDAzOVoXDTQwMDMyOTE0NDAzOVowgY0xCzAJBgNVBAYTAkNaMRowGAYDVQQI 7 | DBFKaWhvbW9yYXZza3kga3JhajENMAsGA1UEBwwEQnJubzEbMBkGA1UECgwSU2Nv 8 | cmlhIENvcnBvcmF0aW9uMTYwNAYDVQQDDC1TY29yaWEgQ29ycG9yYXRpb24gUm9v 9 | dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw 10 | ggIKAoICAQDJqBOt19LipwyEq8YYnWe8SOJcDSE6fc+3gSggOSisJvDcjDZfgER2 11 | eJmVdDutRbbeHoCTlA57buIy+3Dr1BkHbWpNrSlcBD3fgja6BhDZiH6Cuq3BvL5b 12 | y2Yin96lk5JXmjNT5SP6vBmIe68lt+2BwjHgrbI6s8vOJwOy6gGZ8rVKGR6lHtbY 13 | S7DznswyGoDuOlzHdf/9PNfbf1Jd72qn6qpAkf7GGvzqJaxqamhtB+V4QjSuv2Ts 14 | em61+/7aeIN+MIF7IkiyVm+FwoVz505oAoeP8obXLFi2VKifinOrTMMMIoDd9I2m 15 | FHraS5OhmlD4XaGNV9YhOYYu/gFgiHkQyjGBjtH+a4pZPwi9SyhsBHDRWx8HsWZV 16 | 6DWLjUyUhoM9yCUUYIPv+dA6zPhs5LKsmUfM5ASuhjTN/BBx+zpTUurX6Fmnz2Io 17 | ypfiYjGWMdrwUdMLa6pY/5RcCysJHkrVLZSQi6hiC3yPqg0TlPVYBIcGP3vbkEcU 18 | f7MBqdH6Tc8wdSAWSc+zgVD0ql5+TZ6MUXnL5wf2NYwuuzQDa1gT/VfjOZOjkv3H 19 | lPC8isg926R6XuywPL4CynrL/qn6DRwNVelp31aD95HBS6YAVhJg7S4odQHDar4P 20 | bA+qXqx0+syMyF9+c6liV2fmCHMKgRFFi6SfuwmpQ92gU53bFXPa1QIDAQABo4IB 21 | AjCB/zAdBgNVHQ4EFgQUhVz9eXfMmqIaA4m3NVpJpI1tz1AwgcIGA1UdIwSBujCB 22 | t4AUhVz9eXfMmqIaA4m3NVpJpI1tz1ChgZOkgZAwgY0xCzAJBgNVBAYTAkNaMRow 23 | GAYDVQQIDBFKaWhvbW9yYXZza3kga3JhajENMAsGA1UEBwwEQnJubzEbMBkGA1UE 24 | CgwSU2NvcmlhIENvcnBvcmF0aW9uMTYwNAYDVQQDDC1TY29yaWEgQ29ycG9yYXRp 25 | b24gUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCCQDOfETvbF76BzAMBgNVHRME 26 | BTADAQH/MAsGA1UdDwQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAHkqrlcn7pzr/ 27 | UOsWkwtJkZaUgnejrryMsS24Oj7sWmpH23ZG//97gLibAjIhngZm3AOS4K7TVxvW 28 | rkirvaRq5ZbehOnMqLhEBbAjumK2RjeM8SBzRqYBsvU7iELyN/IMgsHzeul/5/0R 29 | vsBr0vtI6acKOAkUfMbpxN7m/gOL2CvGUmDy1NXtHWQTeDf6wxWkNGBb4E66sK66 30 | auSP205xxKzlMCzRaf8nfDAx7oy4zQtjJKunMtglxjrpGDCEFMixT8wqIUbf46o+ 31 | +uK2AWqprBFL42+qGiu68gzMz1WS1iMmzbM0DUmAc3piDnBOz9YZa9iMegZekch5 32 | OL52DDd6tId/eWVFrj/IcHYoCg7KNHQteZ004zUInCpjAT/e78IZFxG8k0lZR1Lc 33 | 87s8QXfhqm/GMzDIFMdZACrH8R90ubocK06iMcTahvI5EilH6LcLut28GGrRH8Og 34 | C0YBAPaZ5cjhflc0grSjPK1dKqj/Vre3CQH/+lJ8qTOBPurXlxFL759bsi9Auath 35 | GZ4bWhFTnykKCXJyzFbFgJObN/r/KrU4LI8q5MrkCseX5UTZ+P345WU6ZykjQqhJ 36 | GPi/z+dXZDy8TQJD8gg07t/oyFlzlaqDkJNWOvU+Bf/zSUyY+WxvGKXb2l9Gd7/s 37 | e2XISxvCzZK32s1mBNWSfl/tX0iw340= 38 | -----END CERTIFICATE----- 39 | -------------------------------------------------------------------------------- 
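These certificates back the TLS integration tests. A sketch of building a matching ssl context and handing it to a client via tls_context, as the test fixtures later in this tree do (the host and port are illustrative; the CA path is relative to the repository root):

    import ssl
    from pymemcache.client.base import Client

    context = ssl.create_default_context(
        cafile="pymemcache/test/certs/ca-root.crt"
    )
    client = Client(("localhost", 11212), tls_context=context)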
/pymemcache/test/certs/client.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEyzCCArOgAwIBAgIJAPPSvsWCQbfFMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD 3 | VQQGEwJDWjEaMBgGA1UECAwRSmlob21vcmF2c2t5IGtyYWoxDTALBgNVBAcMBEJy 4 | bm8xGzAZBgNVBAoMElNjb3JpYSBDb3Jwb3JhdGlvbjE9MDsGA1UEAww0U2Nvcmlh 5 | IENvcnBvcmF0aW9uIENsaWVudCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eTAe 6 | Fw0yMDA0MDMxNDQwNDBaFw0yMjA0MDMxNDQwNDBaMIGFMQswCQYDVQQGEwJDWjEa 7 | MBgGA1UECAwRSmlob21vcmF2c2t5IGtyYWoxDTALBgNVBAcMBEJybm8xGzAZBgNV 8 | BAoMElNjb3JpYSBDb3Jwb3JhdGlvbjEuMCwGA1UEAwwlU2NvcmlhIENvcnBvcmF0 9 | aW9uIENsaWVudCBDZXJ0aWZpY2F0ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC 10 | AQoCggEBALAJe+CxlDH9ajw9q7rpYOaXBZ7Z2t2qmRFChR9rySQVFft2mTsyeF9W 11 | 0zVNiR7wg1W74VvrrcQsv8OkbgEVeWt7e9lKIoIFzrQ1dJGUAs+vF4IQOKmlanWt 12 | jjz42fuJVlwTn71rXHCxoyqd0jCaRd7BHtf/fl7Po9WEFRjUr5O1iZWHBIwIn7q+ 13 | edIwEUBs6qJN3vO42nqYmY7mQ/hG+vVzq7cL2WkN/EMGvj9SRVl0OMbmKnfxmUUi 14 | FoVnB6KiREHt4Kb/4y1plZzAmEMI2QDpPp/keLSmHw55U2waTEo+BKJ//G4dp7Rs 15 | K+CkdlOTIAEDM/AYvbM0/0rkPceovCMCAwEAAaMtMCswCQYDVR0TBAIwADARBglg 16 | hkgBhvhCAQEEBAMCB4AwCwYDVR0PBAQDAgXgMA0GCSqGSIb3DQEBCwUAA4ICAQBV 17 | M9wSpuC4zt5LhhXBHmxHuUVdIEIU+XXLTzMms3IC8r56rH4fFD6wfyVqvTlLVIyk 18 | UeX/FrZ9P1uOt1H1nDeNLlK8ihVdw+JSLplCfjX7SevD8tXdnokcl95p3RMMHjXU 19 | d46pY1StAU9fIm46WVsbtzfIPhejNlhn2L3DW3V2tkVXEKzdvaiFvmLWVlalxawY 20 | CoyDh4m9E5s6l/B9RoLCAajSGeXQxMCm2L9DwAyUJhFPQYLO4YJT1fM7cvl7Irms 21 | qjRAPq0rroebSP3bZDP0PXe7hwd01JcSnuLcQg6cOnsL9UOla8UpqJrMxG+rBD9o 22 | nnIOoFA/2pjNsa0xTarRXa7C75H0f4TWlEzhsEvlTqT1eTVu/XfUcv2r2mL+jSVW 23 | 7iSQ37tlR8hN9L8/iYjIMlsf++3pdK1rvP0Mk8042pL8eqB+OYUQe/88KaNxTBeN 24 | q1sqzkXtcJk7DqTBPXfHFJgzASpy7UR56sa/P7XmqTmBrpNDMP2XUkdNoAQjGae1 25 | qiRmTiHP9e7d3bfWjW+odjbCxxZz5v4vfYY8FB6w2FfgLknfmnYKTOVR5ewT0d3T 26 | 01mLiKVtNDlMNHSBsOWvv72sH8Y1viQ09AzzrsCEFmyCGvQXQ4bps0ObIAITS98f 27 | S1D9f+XM2TZJ/WxEB5VQP30iegfqEuKrwUTk8Lh6+g== 28 | -----END CERTIFICATE----- 29 | -------------------------------------------------------------------------------- /pymemcache/test/certs/client.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEowIBAAKCAQEAsAl74LGUMf1qPD2ruulg5pcFntna3aqZEUKFH2vJJBUV+3aZ 3 | OzJ4X1bTNU2JHvCDVbvhW+utxCy/w6RuARV5a3t72UoiggXOtDV0kZQCz68XghA4 4 | qaVqda2OPPjZ+4lWXBOfvWtccLGjKp3SMJpF3sEe1/9+Xs+j1YQVGNSvk7WJlYcE 5 | jAifur550jARQGzqok3e87jaepiZjuZD+Eb69XOrtwvZaQ38Qwa+P1JFWXQ4xuYq 6 | d/GZRSIWhWcHoqJEQe3gpv/jLWmVnMCYQwjZAOk+n+R4tKYfDnlTbBpMSj4Eon/8 7 | bh2ntGwr4KR2U5MgAQMz8Bi9szT/SuQ9x6i8IwIDAQABAoIBAHiziATgvcQpBhaY 8 | Eo/uRUrWcjwhFDi5KIr1GWIZ/aiH7LKm9xnn2TFFzzvVFhfowaSfVj44ssS4CiST 9 | Mfn8R2yzFpA+jLqqULivjmXjHqpYW74KcU+g5AYcIlMcLhqSaGxp6DVwz8lVg5NM 10 | 8znwDchWkld4D6XiqWtVTUHhUyHrS74RR5KNEDSTJO+hwwWrviz9nzn5XO4vBa2C 11 | w+SxFbQ3b4A/BCAIxEawYmBunizns29PFEgTqbmu+obRnjCHGzDH88Ob6R1uXn5f 12 | 4ofVOIGYpJi1X+0I9Io2fS9oOoaRU82gz26YLxKuE1XbZXrSchUGnfWpsVF0+yqi 13 | TSy6cAECgYEA47OBhS1sDDg/TPwT26SVokGLhK30UxcOpxIaW9Dv+JnCGyfSffRD 14 | BYBj2aiFLTZghJlqsumHjgRuZ4ZWW5tasioSbZ4IidIjtCkRTCv/M+eNVfaEjbZJ 15 | Bg7uP3WnzcztYqdIbqgmyAq6ExqPr6WsICXka3SlEordOn1wuNT4NyMCgYEAxeo9 16 | +sRyihydkNBrrcAJB5xCfPVG+THLAfUdTCZ9vC/GU31SN4CRsivvi6pwT0OKBFnz 17 | OFjojW7Gb9c1SVgljMLubbpZfiDwT/JNzh6meEJTQnvsm3MrdNx6Zo7p2LDuOIZJ 18 | 2LQZzFKGckMxvk2xJXWHCzoBvAxecSxDe79INwECgYANE944e+dcvE5GaaPqVYWS 19 | kBknQaZqr0RULCH/a/ycVphjXuIkAcdnpXwWoCsl8Z2RgA40wFzctzxwDbMgB8gp 20 | u2jbitwKrlsGmeU4br51iLMBYOs0CGghRPJCCsvccgygQeNTF61Ch/sv5bKi7+z2 21 | 
27ZGxahFbFxQY6v5saGf6QKBgACYTKllT8bUgTC/P6OdESnhsV14y0bSfH68AuOI 22 | thYLurfjh4y9KTL06Nptn7rNRCvxLUb9FW3faF9LsVBQIITEzTytM7mqVa6X1t4I 23 | v41a/a8UekiZVwcZ5pBKW6+YEI9A8BXjrLQth1Pumcatqxumt8oz2W98RghnDqjf 24 | kVMBAoGBALbsVnmLnLiP2KnaYvYQyos8v7z43vdU1tknz04OxrMzPkBL7K0Mvk/0 25 | yqD5jsR0cM/Fzc2RE7QBaSOkaShltIWIXlseO+kqPJ4XlLXmse3nmW8YG1ryokcG 26 | LByhR57Kr6jHFGVcLqxrj2Bcgt6+oiCeREIjPgQMUH90W0wPM7XT 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /pymemcache/test/certs/update.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # extracting client credentials 4 | docker run --rm scoriacorp/tls_memcached cat /opt/certs/key/client.key > client.key 5 | docker run --rm scoriacorp/tls_memcached cat /opt/certs/crt/client.crt > client.crt 6 | 7 | # extracting CA certificate 8 | docker run --rm scoriacorp/tls_memcached cat /opt/certs/crt/ca-root.crt > ca-root.crt 9 | -------------------------------------------------------------------------------- /pymemcache/test/conftest.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import socket 3 | import ssl 4 | 5 | import pytest 6 | 7 | 8 | def pytest_addoption(parser): 9 | parser.addoption( 10 | "--server", action="store", default="localhost", help="memcached server" 11 | ) 12 | 13 | parser.addoption( 14 | "--port", action="store", default="11211", help="memcached server port" 15 | ) 16 | 17 | parser.addoption( 18 | "--tls-server", action="store", default="localhost", help="TLS memcached server" 19 | ) 20 | 21 | parser.addoption( 22 | "--tls-port", action="store", default="11212", help="TLS memcached server port" 23 | ) 24 | 25 | parser.addoption( 26 | "--size", action="store", default=1024, help="size of data in benchmarks" 27 | ) 28 | 29 | parser.addoption( 30 | "--count", 31 | action="store", 32 | default=10000, 33 | help="number of iterations to run each benchmark", 34 | ) 35 | 36 | parser.addoption( 37 | "--keys", 38 | action="store", 39 | default=20, 40 | help="number of keys to use for multi benchmarks", 41 | ) 42 | 43 | 44 | @pytest.fixture(scope="session") 45 | def host(request): 46 | return request.config.option.server 47 | 48 | 49 | @pytest.fixture(scope="session") 50 | def port(request): 51 | return int(request.config.option.port) 52 | 53 | 54 | @pytest.fixture(scope="session") 55 | def tls_host(request): 56 | return request.config.option.tls_server 57 | 58 | 59 | @pytest.fixture(scope="session") 60 | def tls_port(request): 61 | return int(request.config.option.tls_port) 62 | 63 | 64 | @pytest.fixture(scope="session") 65 | def size(request): 66 | return int(request.config.option.size) 67 | 68 | 69 | @pytest.fixture(scope="session") 70 | def count(request): 71 | return int(request.config.option.count) 72 | 73 | 74 | @pytest.fixture(scope="session") 75 | def keys(request): 76 | return int(request.config.option.keys) 77 | 78 | 79 | @pytest.fixture(scope="session") 80 | def pairs(size, keys): 81 | return {"pymemcache_test:%d" % i: "X" * size for i in range(keys)} 82 | 83 | 84 | @pytest.fixture(scope="session") 85 | def tls_context(): 86 | return ssl.create_default_context( 87 | cafile=os.path.join(os.path.dirname(__file__), "certs/ca-root.crt") 88 | ) 89 | 90 | 91 | def pytest_generate_tests(metafunc): 92 | if "socket_module" in metafunc.fixturenames: 93 | socket_modules = [socket] 94 | try: 95 | from gevent import socket as gevent_socket # type: ignore 96 | 
except ImportError: 97 | print("Skipping gevent (not installed)") 98 | else: 99 | socket_modules.append(gevent_socket) 100 | 101 | metafunc.parametrize("socket_module", socket_modules) 102 | 103 | if "client_class" in metafunc.fixturenames: 104 | from pymemcache.client.base import Client, PooledClient 105 | from pymemcache.client.hash import HashClient 106 | 107 | class HashClientSingle(HashClient): 108 | def __init__(self, server, *args, **kwargs): 109 | super().__init__([server], *args, **kwargs) 110 | 111 | metafunc.parametrize("client_class", [Client, PooledClient, HashClientSingle]) 112 | 113 | if "key_prefix" in metafunc.fixturenames: 114 | mark = metafunc.definition.get_closest_marker("parametrize") 115 | if not mark or "key_prefix" not in mark.args[0]: 116 | metafunc.parametrize("key_prefix", [b"", b"prefix"]) 117 | -------------------------------------------------------------------------------- /pymemcache/test/test_benchmark.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Pinterest.com 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import time 16 | import pytest 17 | 18 | try: 19 | import pylibmc # type: ignore 20 | 21 | HAS_PYLIBMC = True 22 | except Exception: 23 | HAS_PYLIBMC = False 24 | 25 | try: 26 | import memcache # type: ignore 27 | 28 | HAS_MEMCACHE = True 29 | except Exception: 30 | HAS_MEMCACHE = False 31 | 32 | 33 | try: 34 | import pymemcache.client 35 | 36 | HAS_PYMEMCACHE = True 37 | except Exception: 38 | HAS_PYMEMCACHE = False 39 | 40 | 41 | @pytest.fixture( 42 | params=[ 43 | "pylibmc", 44 | "memcache", 45 | "pymemcache", 46 | ] 47 | ) 48 | def client(request, host, port): 49 | if request.param == "pylibmc": 50 | if not HAS_PYLIBMC: 51 | pytest.skip("requires pylibmc") 52 | client = pylibmc.Client([f"{host}:{port}"]) 53 | client.behaviors = {"tcp_nodelay": True} 54 | 55 | elif request.param == "memcache": 56 | if not HAS_MEMCACHE: 57 | pytest.skip("requires python-memcached") 58 | client = memcache.Client([f"{host}:{port}"]) 59 | 60 | elif request.param == "pymemcache": 61 | if not HAS_PYMEMCACHE: 62 | pytest.skip("requires pymemcache") 63 | client = pymemcache.client.Client((host, port)) 64 | 65 | else: 66 | pytest.skip(f"unknown library {request.param}") 67 | 68 | client.flush_all() 69 | return client 70 | 71 | 72 | def benchmark(count, func, *args, **kwargs): 73 | start = time.time() 74 | 75 | for _ in range(count): 76 | result = func(*args, **kwargs) 77 | 78 | duration = time.time() - start 79 | print(str(duration)) 80 | 81 | return result 82 | 83 | 84 | @pytest.mark.benchmark() 85 | def test_bench_get(request, client, pairs, count): 86 | key = "pymemcache_test:0" 87 | value = pairs[key] 88 | client.set(key, value) 89 | benchmark(count, client.get, key) 90 | 91 | 92 | @pytest.mark.benchmark() 93 | def test_bench_set(request, client, pairs, count): 94 | key = "pymemcache_test:0" 95 | value = pairs[key] 96 | benchmark(count, client.set, key, value) 97 | 98 
| 99 | @pytest.mark.benchmark() 100 | def test_bench_get_multi(request, client, pairs, count): 101 | client.set_multi(pairs) 102 | benchmark(count, client.get_multi, list(pairs)) 103 | 104 | 105 | @pytest.mark.benchmark() 106 | def test_bench_set_multi(request, client, pairs, count): 107 | benchmark(count, client.set_multi, pairs) 108 | 109 | 110 | @pytest.mark.benchmark() 111 | def test_bench_delete(request, client, pairs, count): 112 | benchmark(count, client.delete, next(pairs)) 113 | 114 | 115 | @pytest.mark.benchmark() 116 | def test_bench_delete_multi(request, client, pairs, count): 117 | # deleting missing key takes the same work client-side as real keys 118 | benchmark(count, client.delete_multi, list(pairs.keys())) 119 | -------------------------------------------------------------------------------- /pymemcache/test/test_client_retry.py: -------------------------------------------------------------------------------- 1 | """ Test collection for the RetryingClient. """ 2 | 3 | import functools 4 | import unittest 5 | from unittest import mock 6 | 7 | import pytest 8 | 9 | from .test_client import ClientTestMixin, MockSocket 10 | from pymemcache.client.retrying import RetryingClient 11 | from pymemcache.client.base import Client 12 | from pymemcache.exceptions import MemcacheUnknownError, MemcacheClientError 13 | 14 | 15 | # Test pure passthroughs with no retry action. 16 | class TestRetryingClientPassthrough(ClientTestMixin, unittest.TestCase): 17 | def make_base_client(self, mock_socket_values, **kwargs): 18 | base_client = Client("localhost", **kwargs) 19 | # mock out client._connect() rather than hard-setting client.sock to 20 | # ensure methods are checking whether self.sock is None before 21 | # attempting to use it 22 | sock = MockSocket(list(mock_socket_values)) 23 | base_client._connect = mock.Mock( 24 | side_effect=functools.partial(setattr, base_client, "sock", sock) 25 | ) 26 | return base_client 27 | 28 | def make_client(self, mock_socket_values, **kwargs): 29 | # Create a base client to wrap. 30 | base_client = self.make_base_client( 31 | mock_socket_values=mock_socket_values, **kwargs 32 | ) 33 | 34 | # Wrap the client in the retrying class, disable retries. 35 | client = RetryingClient(base_client, attempts=1) 36 | return client 37 | 38 | 39 | # Retry specific tests. 40 | @pytest.mark.unit() 41 | class TestRetryingClient: 42 | def make_base_client(self, mock_socket_values, **kwargs): 43 | """Creates a regular mock client to wrap in the RetryClient.""" 44 | base_client = Client("localhost", **kwargs) 45 | # mock out client._connect() rather than hard-setting client.sock to 46 | # ensure methods are checking whether self.sock is None before 47 | # attempting to use it 48 | sock = MockSocket(list(mock_socket_values)) 49 | base_client._connect = mock.Mock( 50 | side_effect=functools.partial(setattr, base_client, "sock", sock) 51 | ) 52 | return base_client 53 | 54 | def make_client(self, mock_socket_values, **kwargs): 55 | """ 56 | Creates a RetryingClient that will respond with the given values, 57 | configured using kwargs. 58 | """ 59 | # Create a base client to wrap. 60 | base_client = self.make_base_client(mock_socket_values=mock_socket_values) 61 | 62 | # Wrap the client in the retrying class, and pass kwargs on. 63 | client = RetryingClient(base_client, **kwargs) 64 | return client 65 | 66 | # Start testing. 
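The mock-socket value lists used throughout the retry tests below act as scripted server responses, one element per command sent. For example (illustrative, mirroring test_retry_set_success further down):

    client = self.make_client([b"UNKNOWN\r\n", b"STORED\r\n"], attempts=2)
    client.set("key", "value", noreply=False)  # the first reply raises
                                               # MemcacheUnknownError; the retry
                                               # reads STORED and returns True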
67 | def test_constructor_default(self): 68 | base_client = self.make_base_client([]) 69 | RetryingClient(base_client) 70 | 71 | with pytest.raises(TypeError): 72 | RetryingClient() 73 | 74 | def test_constructor_attempts(self): 75 | base_client = self.make_base_client([]) 76 | rc = RetryingClient(base_client, attempts=1) 77 | assert rc._attempts == 1 78 | 79 | with pytest.raises(ValueError): 80 | RetryingClient(base_client, attempts=0) 81 | 82 | def test_constructor_retry_for(self): 83 | base_client = self.make_base_client([]) 84 | 85 | # Try none/default. 86 | rc = RetryingClient(base_client, retry_for=None) 87 | assert rc._retry_for == tuple() 88 | 89 | # Try with tuple. 90 | rc = RetryingClient(base_client, retry_for=tuple([Exception])) 91 | assert rc._retry_for == tuple([Exception]) 92 | 93 | # Try with list. 94 | rc = RetryingClient(base_client, retry_for=[Exception]) 95 | assert rc._retry_for == tuple([Exception]) 96 | 97 | # Try with multi element list. 98 | rc = RetryingClient(base_client, retry_for=[Exception, IOError]) 99 | assert rc._retry_for == (Exception, IOError) 100 | 101 | # With string? 102 | with pytest.raises(ValueError): 103 | RetryingClient(base_client, retry_for="haha!") 104 | 105 | # With collection of string and exceptions? 106 | with pytest.raises(ValueError): 107 | RetryingClient(base_client, retry_for=[Exception, str]) 108 | 109 | def test_constructor_do_no_retry_for(self): 110 | base_client = self.make_base_client([]) 111 | 112 | # Try none/default. 113 | rc = RetryingClient(base_client, do_not_retry_for=None) 114 | assert rc._do_not_retry_for == tuple() 115 | 116 | # Try with tuple. 117 | rc = RetryingClient(base_client, do_not_retry_for=tuple([Exception])) 118 | assert rc._do_not_retry_for == tuple([Exception]) 119 | 120 | # Try with list. 121 | rc = RetryingClient(base_client, do_not_retry_for=[Exception]) 122 | assert rc._do_not_retry_for == tuple([Exception]) 123 | 124 | # Try with multi element list. 125 | rc = RetryingClient(base_client, do_not_retry_for=[Exception, IOError]) 126 | assert rc._do_not_retry_for == (Exception, IOError) 127 | 128 | # With string? 129 | with pytest.raises(ValueError): 130 | RetryingClient(base_client, do_not_retry_for="haha!") 131 | 132 | # With collection of string and exceptions? 133 | with pytest.raises(ValueError): 134 | RetryingClient(base_client, do_not_retry_for=[Exception, str]) 135 | 136 | def test_constructor_both_filters(self): 137 | base_client = self.make_base_client([]) 138 | 139 | # Try none/default. 140 | rc = RetryingClient(base_client, retry_for=None, do_not_retry_for=None) 141 | assert rc._retry_for == tuple() 142 | assert rc._do_not_retry_for == tuple() 143 | 144 | # Try a valid config. 
145 | rc = RetryingClient( 146 | base_client, 147 | retry_for=[Exception, IOError], 148 | do_not_retry_for=[ValueError, MemcacheUnknownError], 149 | ) 150 | assert rc._retry_for == (Exception, IOError) 151 | assert rc._do_not_retry_for == (ValueError, MemcacheUnknownError) 152 | 153 | # Try with overlapping filters 154 | with pytest.raises(ValueError): 155 | rc = RetryingClient( 156 | base_client, 157 | retry_for=[Exception, IOError, MemcacheUnknownError], 158 | do_not_retry_for=[ValueError, MemcacheUnknownError], 159 | ) 160 | 161 | def test_dir_passthrough(self): 162 | base = self.make_base_client([]) 163 | client = RetryingClient(base) 164 | 165 | assert dir(base) == dir(client) 166 | 167 | def test_retry_dict_set_is_supported(self): 168 | client = self.make_client([b"UNKNOWN\r\n", b"STORED\r\n"]) 169 | client[b"key"] = b"value" 170 | 171 | def test_retry_dict_get_is_supported(self): 172 | client = self.make_client( 173 | [b"UNKNOWN\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"] 174 | ) 175 | assert client[b"key"] == b"value" 176 | 177 | def test_retry_dict_get_not_found_is_supported(self): 178 | client = self.make_client([b"UNKNOWN\r\n", b"END\r\n"]) 179 | 180 | with pytest.raises(KeyError): 181 | client[b"key"] 182 | 183 | def test_retry_dict_del_is_supported(self): 184 | client = self.make_client([b"UNKNOWN\r\n", b"DELETED\r\n"]) 185 | del client[b"key"] 186 | 187 | def test_retry_get_found(self): 188 | client = self.make_client( 189 | [b"UNKNOWN\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"], attempts=2 190 | ) 191 | result = client.get("key") 192 | assert result == b"value" 193 | 194 | def test_retry_get_not_found(self): 195 | client = self.make_client([b"UNKNOWN\r\n", b"END\r\n"], attempts=2) 196 | result = client.get("key") 197 | assert result is None 198 | 199 | def test_retry_get_exception(self): 200 | client = self.make_client([b"UNKNOWN\r\n", b"UNKNOWN\r\n"], attempts=2) 201 | with pytest.raises(MemcacheUnknownError): 202 | client.get("key") 203 | 204 | def test_retry_set_success(self): 205 | client = self.make_client([b"UNKNOWN\r\n", b"STORED\r\n"], attempts=2) 206 | result = client.set("key", "value", noreply=False) 207 | assert result is True 208 | 209 | def test_retry_set_fail(self): 210 | client = self.make_client( 211 | [b"UNKNOWN\r\n", b"UNKNOWN\r\n", b"STORED\r\n"], attempts=2 212 | ) 213 | with pytest.raises(MemcacheUnknownError): 214 | client.set("key", "value", noreply=False) 215 | 216 | def test_no_retry(self): 217 | client = self.make_client( 218 | [b"UNKNOWN\r\n", b"VALUE key 0 5\r\nvalue\r\nEND\r\n"], attempts=1 219 | ) 220 | 221 | with pytest.raises(MemcacheUnknownError): 222 | client.get("key") 223 | 224 | def test_retry_for_exception_success(self): 225 | # Test that we retry for the exception specified. 226 | client = self.make_client( 227 | [MemcacheClientError("Whoops."), b"VALUE key 0 5\r\nvalue\r\nEND\r\n"], 228 | attempts=2, 229 | retry_for=tuple([MemcacheClientError]), 230 | ) 231 | result = client.get("key") 232 | assert result == b"value" 233 | 234 | def test_retry_for_exception_fail(self): 235 | # Test that we do not retry for unapproved exception. 236 | client = self.make_client( 237 | [MemcacheUnknownError("Whoops."), b"VALUE key 0 5\r\nvalue\r\nEND\r\n"], 238 | attempts=2, 239 | retry_for=tuple([MemcacheClientError]), 240 | ) 241 | 242 | with pytest.raises(MemcacheUnknownError): 243 | client.get("key") 244 | 245 | def test_do_not_retry_for_exception_success(self): 246 | # Test that we retry for exceptions not specified. 
247 | client = self.make_client( 248 | [MemcacheClientError("Whoops."), b"VALUE key 0 5\r\nvalue\r\nEND\r\n"], 249 | attempts=2, 250 | do_not_retry_for=tuple([MemcacheUnknownError]), 251 | ) 252 | result = client.get("key") 253 | assert result == b"value" 254 | 255 | def test_do_not_retry_for_exception_fail(self): 256 | # Test that we do not retry for the exception specified. 257 | client = self.make_client( 258 | [MemcacheClientError("Whoops."), b"VALUE key 0 5\r\nvalue\r\nEND\r\n"], 259 | attempts=2, 260 | do_not_retry_for=tuple([MemcacheClientError]), 261 | ) 262 | 263 | with pytest.raises(MemcacheClientError): 264 | client.get("key") 265 | 266 | def test_both_exception_filters(self): 267 | # Test interaction between both exception filters. 268 | client = self.make_client( 269 | [ 270 | MemcacheClientError("Whoops."), 271 | b"VALUE key 0 5\r\nvalue\r\nEND\r\n", 272 | MemcacheUnknownError("Whoops."), 273 | b"VALUE key 0 5\r\nvalue\r\nEND\r\n", 274 | ], 275 | attempts=2, 276 | retry_for=tuple([MemcacheClientError]), 277 | do_not_retry_for=tuple([MemcacheUnknownError]), 278 | ) 279 | 280 | # Check that we succeed where allowed. 281 | result = client.get("key") 282 | assert result == b"value" 283 | 284 | # Check that no retries are attempted for the banned exception. 285 | with pytest.raises(MemcacheUnknownError): 286 | client.get("key") 287 | -------------------------------------------------------------------------------- /pymemcache/test/test_compression.py: -------------------------------------------------------------------------------- 1 | from pymemcache.client.base import Client 2 | from pymemcache.serde import ( 3 | CompressedSerde, 4 | pickle_serde, 5 | ) 6 | 7 | from faker import Faker 8 | 9 | import pytest 10 | import random 11 | import string 12 | import time 13 | import zstd # type: ignore 14 | import zlib 15 | 16 | fake = Faker(["it_IT", "en_US", "ja_JP"]) 17 | 18 | 19 | def get_random_string(length): 20 | letters = string.ascii_letters 21 | chars = string.punctuation 22 | digits = string.digits 23 | total = letters + chars + digits 24 | result_str = "".join(random.choice(total) for i in range(length)) 25 | return result_str 26 | 27 | 28 | class CustomObject: 29 | """ 30 | Custom class for verifying serialization 31 | """ 32 | 33 | def __init__(self): 34 | self.number = random.randint(0, 100) 35 | self.string = fake.text() 36 | self.object = fake.profile() 37 | 38 | 39 | class CustomObjectValue: 40 | def __init__(self, value): 41 | self.value = value 42 | 43 | 44 | def benchmark(count, func, *args, **kwargs): 45 | start = time.time() 46 | 47 | for _ in range(count): 48 | result = func(*args, **kwargs) 49 | 50 | duration = time.time() - start 51 | print(str(duration)) 52 | 53 | return result 54 | 55 | 56 | @pytest.fixture(scope="session") 57 | def names(): 58 | names = [] 59 | for _ in range(15): 60 | names.append(fake.name()) 61 | 62 | return names 63 | 64 | 65 | @pytest.fixture(scope="session") 66 | def paragraphs(): 67 | paragraphs = [] 68 | for _ in range(15): 69 | paragraphs.append(fake.text()) 70 | 71 | return paragraphs 72 | 73 | 74 | @pytest.fixture(scope="session") 75 | def objects(): 76 | objects = [] 77 | for _ in range(15): 78 | objects.append(CustomObject()) 79 | 80 | return objects 81 | 82 | 83 | # Always run compression for the benchmarks 84 | min_compress_len = 1 85 | 86 | default_serde = CompressedSerde(min_compress_len=min_compress_len) 87 | 88 | zlib_serde = CompressedSerde( 89 | compress=lambda value: zlib.compress(value, 9), 90 | decompress=lambda value: 
zlib.decompress(value), 91 | min_compress_len=min_compress_len, 92 | ) 93 | 94 | zstd_serde = CompressedSerde( 95 | compress=lambda value: zstd.compress(value), 96 | decompress=lambda value: zstd.decompress(value), 97 | min_compress_len=min_compress_len, 98 | ) 99 | 100 | serializers = [ 101 | None, 102 | default_serde, 103 | zlib_serde, 104 | zstd_serde, 105 | ] 106 | ids = ["none", "zlib ", "zlib9", "zstd "] 107 | 108 | 109 | @pytest.mark.benchmark() 110 | @pytest.mark.parametrize("serde", serializers, ids=ids) 111 | def test_bench_compress_set_strings(count, host, port, serde, names): 112 | client = Client((host, port), serde=serde, encoding="utf-8") 113 | 114 | def test(): 115 | for index, name in enumerate(names): 116 | key = f"name_{index}" 117 | client.set(key, name) 118 | 119 | benchmark(count, test) 120 | 121 | 122 | @pytest.mark.benchmark() 123 | @pytest.mark.parametrize("serde", serializers, ids=ids) 124 | def test_bench_compress_get_strings(count, host, port, serde, names): 125 | client = Client((host, port), serde=serde, encoding="utf-8") 126 | for index, name in enumerate(names): 127 | key = f"name_{index}" 128 | client.set(key, name) 129 | 130 | def test(): 131 | for index, _ in enumerate(names): 132 | key = f"name_{index}" 133 | client.get(key) 134 | 135 | benchmark(count, test) 136 | 137 | 138 | @pytest.mark.benchmark() 139 | @pytest.mark.parametrize("serde", serializers, ids=ids) 140 | def test_bench_compress_set_large_strings(count, host, port, serde, paragraphs): 141 | client = Client((host, port), serde=serde, encoding="utf-8") 142 | 143 | def test(): 144 | for index, p in enumerate(paragraphs): 145 | key = f"paragraph_{index}" 146 | client.set(key, p) 147 | 148 | benchmark(count, test) 149 | 150 | 151 | @pytest.mark.benchmark() 152 | @pytest.mark.parametrize("serde", serializers, ids=ids) 153 | def test_bench_compress_get_large_strings(count, host, port, serde, paragraphs): 154 | client = Client((host, port), serde=serde, encoding="utf-8") 155 | for index, p in enumerate(paragraphs): 156 | key = f"paragraphs_{index}" 157 | client.set(key, p) 158 | 159 | def test(): 160 | for index, _ in enumerate(paragraphs): 161 | key = f"paragraphs_{index}" 162 | client.get(key) 163 | 164 | benchmark(count, test) 165 | 166 | 167 | @pytest.mark.benchmark() 168 | @pytest.mark.parametrize("serde", serializers, ids=ids) 169 | def test_bench_compress_set_objects(count, host, port, serde, objects): 170 | client = Client((host, port), serde=serde, encoding="utf-8") 171 | 172 | def test(): 173 | for index, o in enumerate(objects): 174 | key = f"objects_{index}" 175 | client.set(key, o) 176 | 177 | benchmark(count, test) 178 | 179 | 180 | @pytest.mark.benchmark() 181 | @pytest.mark.parametrize("serde", serializers, ids=ids) 182 | def test_bench_compress_get_objects(count, host, port, serde, objects): 183 | client = Client((host, port), serde=serde, encoding="utf-8") 184 | for index, o in enumerate(objects): 185 | key = f"objects_{index}" 186 | client.set(key, o) 187 | 188 | def test(): 189 | for index, _ in enumerate(objects): 190 | key = f"objects_{index}" 191 | client.get(key) 192 | 193 | benchmark(count, test) 194 | 195 | 196 | @pytest.mark.benchmark() 197 | def test_optimal_compression_length(): 198 | for length in range(5, 2000): 199 | input_data = get_random_string(length) 200 | start = len(input_data) 201 | 202 | for index, serializer in enumerate(serializers[1:]): 203 | name = ids[index + 1] 204 | value, _ = serializer.serialize("foo", input_data) 205 | end = len(value) 206 | 
print(f"serializer={name}\t start={start}\t end={end}") 207 | 208 | 209 | @pytest.mark.benchmark() 210 | def test_optimal_compression_length_objects(): 211 | for length in range(5, 2000): 212 | input_data = get_random_string(length) 213 | obj = CustomObjectValue(input_data) 214 | start = len(pickle_serde.serialize("foo", obj)[0]) 215 | 216 | for index, serializer in enumerate(serializers[1:]): 217 | name = ids[index + 1] 218 | value, _ = serializer.serialize("foo", obj) 219 | end = len(value) 220 | print(f"serializer={name}\t start={start}\t end={end}") 221 | -------------------------------------------------------------------------------- /pymemcache/test/test_ext_aws_ec_client.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from unittest.mock import MagicMock, patch 4 | 5 | from pymemcache import Client 6 | from pymemcache.client.ext.aws_ec_client import AWSElastiCacheHashClient 7 | 8 | from .test_client import MockSocketModule, MockMemcacheClient 9 | 10 | 11 | @pytest.mark.unit() 12 | @pytest.mark.parametrize( 13 | "connection_sting", 14 | ["cluster.abcxyz.cfg.use1.cache.amazonaws.com:11211", "1.1.1.1:11211"], 15 | ) 16 | def test_init_valid_node_endpoint(connection_sting, monkeypatch): 17 | with patch.object( 18 | AWSElastiCacheHashClient, "reconfigure_nodes", new=MagicMock() 19 | ) as mock: 20 | client = AWSElastiCacheHashClient( 21 | connection_sting, socket_module=MockSocketModule() 22 | ) 23 | 24 | assert client._cfg_node == connection_sting 25 | assert mock.called 26 | 27 | 28 | @pytest.mark.unit() 29 | @pytest.mark.parametrize( 30 | "connection_sting", 31 | [ 32 | "cluster.abcxyz.cfg.use1.cache.amazonaws.com:abc", 33 | "cluster.abcxyz.cfg.use1.cache.amazonaws.com", 34 | "cluster.abcxyz.cfg.use1.cache.amazonaws.com:123123", 35 | "1.1..1:11211", 36 | ], 37 | ) 38 | def test_init_invalid_node_endpoint(connection_sting, monkeypatch): 39 | with patch.object( 40 | AWSElastiCacheHashClient, "reconfigure_nodes", new=MagicMock() 41 | ) as mock: 42 | with pytest.raises(ValueError): 43 | AWSElastiCacheHashClient(connection_sting, socket_module=MockSocketModule()) 44 | 45 | 46 | @pytest.mark.parametrize( 47 | "server_configuration", 48 | [ 49 | (True, ["10.0.0.1:11211", "10.0.0.2:11211"]), 50 | ( 51 | False, 52 | [ 53 | "cluster.abcxyz.0001.use1.cache.amazonaws.com:11211", 54 | "cluster.abcxyz.0002.use1.cache.amazonaws.com:11211", 55 | ], 56 | ), 57 | ], 58 | ) 59 | @pytest.mark.unit() 60 | def test_get_cluster_config_command(server_configuration, monkeypatch): 61 | use_vpc, configuration_list = server_configuration 62 | 63 | raw_command = MagicMock( 64 | return_value=b"CONFIG cluster 0 139\r\n" 65 | b"4\n" 66 | b"cluster.abcxyz.0001.use1.cache.amazonaws.com|10.0.0.1|11211 " 67 | b"cluster.abcxyz.0002.use1.cache.amazonaws.com|10.0.0.2|11211" 68 | ) 69 | 70 | with monkeypatch.context() as ctx: 71 | ctx.setattr(Client, "raw_command", raw_command) 72 | ctx.setattr(AWSElastiCacheHashClient, "client_class", MockMemcacheClient) 73 | 74 | client = AWSElastiCacheHashClient( 75 | "cluster.abcxyz.cfg.use1.cache.amazonaws.com:11211", 76 | socket_module=MockSocketModule(), 77 | use_vpc=use_vpc, 78 | ) 79 | 80 | for name, client in client.clients.items(): 81 | assert isinstance(client, MockMemcacheClient) 82 | assert name in configuration_list 83 | 84 | assert raw_command.called 85 | -------------------------------------------------------------------------------- /pymemcache/test/test_integration.py: 
-------------------------------------------------------------------------------- 1 | # Copyright 2012 Pinterest.com 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import json 16 | from collections import defaultdict 17 | 18 | import pytest 19 | from pymemcache.client.base import Client 20 | from pymemcache.exceptions import ( 21 | MemcacheClientError, 22 | MemcacheIllegalInputError, 23 | MemcacheServerError, 24 | ) 25 | from pymemcache.serde import PickleSerde, compressed_serde, pickle_serde 26 | 27 | 28 | def get_set_helper(client, key, value, key2, value2): 29 | result = client.get(key) 30 | assert result is None 31 | 32 | client.set(key, value, noreply=False) 33 | result = client.get(key) 34 | assert result == value 35 | 36 | client.set(key2, value2, noreply=True) 37 | result = client.get(key2) 38 | assert result == value2 39 | 40 | result = client.get_many([key, key2]) 41 | assert result == {key: value, key2: value2} 42 | 43 | result = client.get_many([]) 44 | assert result == {} 45 | 46 | 47 | @pytest.mark.integration() 48 | @pytest.mark.parametrize( 49 | "serde", 50 | [ 51 | pickle_serde, 52 | compressed_serde, 53 | ], 54 | ) 55 | def test_get_set(client_class, host, port, serde, socket_module, key_prefix): 56 | client = client_class( 57 | (host, port), serde=serde, socket_module=socket_module, key_prefix=key_prefix 58 | ) 59 | client.flush_all() 60 | 61 | key = b"key" 62 | value = b"value" 63 | key2 = b"key2" 64 | value2 = b"value2" 65 | get_set_helper(client, key, value, key2, value2) 66 | 67 | 68 | @pytest.mark.integration() 69 | @pytest.mark.parametrize( 70 | "serde", 71 | [ 72 | pickle_serde, 73 | compressed_serde, 74 | ], 75 | ) 76 | def test_get_set_unicode_key( 77 | client_class, host, port, serde, socket_module, key_prefix 78 | ): 79 | client = client_class( 80 | (host, port), 81 | serde=serde, 82 | socket_module=socket_module, 83 | allow_unicode_keys=True, 84 | key_prefix=key_prefix, 85 | ) 86 | client.flush_all() 87 | 88 | key = "こんにちは" 89 | value = b"hello" 90 | key2 = "my☃" 91 | value2 = b"value2" 92 | get_set_helper(client, key, value, key2, value2) 93 | 94 | 95 | @pytest.mark.integration() 96 | @pytest.mark.parametrize( 97 | "serde", 98 | [ 99 | pickle_serde, 100 | compressed_serde, 101 | ], 102 | ) 103 | def test_add_replace(client_class, host, port, serde, socket_module, key_prefix): 104 | client = client_class( 105 | (host, port), serde=serde, socket_module=socket_module, key_prefix=key_prefix 106 | ) 107 | client.flush_all() 108 | 109 | result = client.add(b"key", b"value", noreply=False) 110 | assert result is True 111 | result = client.get(b"key") 112 | assert result == b"value" 113 | 114 | result = client.add(b"key", b"value2", noreply=False) 115 | assert result is False 116 | result = client.get(b"key") 117 | assert result == b"value" 118 | 119 | result = client.replace(b"key1", b"value1", noreply=False) 120 | assert result is False 121 | result = client.get(b"key1") 122 | assert result is None 123 | 124 | result = 
client.replace(b"key", b"value2", noreply=False) 125 | assert result is True 126 | result = client.get(b"key") 127 | assert result == b"value2" 128 | 129 | 130 | @pytest.mark.integration() 131 | def test_append_prepend(client_class, host, port, socket_module, key_prefix): 132 | client = client_class( 133 | (host, port), socket_module=socket_module, key_prefix=key_prefix 134 | ) 135 | client.flush_all() 136 | 137 | result = client.append(b"key", b"value", noreply=False) 138 | assert result is False 139 | result = client.get(b"key") 140 | assert result is None 141 | 142 | result = client.set(b"key", b"value", noreply=False) 143 | assert result is True 144 | result = client.append(b"key", b"after", noreply=False) 145 | assert result is True 146 | result = client.get(b"key") 147 | assert result == b"valueafter" 148 | 149 | result = client.prepend(b"key1", b"value", noreply=False) 150 | assert result is False 151 | result = client.get(b"key1") 152 | assert result is None 153 | 154 | result = client.prepend(b"key", b"before", noreply=False) 155 | assert result is True 156 | result = client.get(b"key") 157 | assert result == b"beforevalueafter" 158 | 159 | 160 | @pytest.mark.integration() 161 | def test_cas(client_class, host, port, socket_module, key_prefix): 162 | client = client_class( 163 | (host, port), socket_module=socket_module, key_prefix=key_prefix 164 | ) 165 | client.flush_all() 166 | result = client.cas(b"key", b"value", b"1", noreply=False) 167 | assert result is None 168 | 169 | result = client.set(b"key", b"value", noreply=False) 170 | assert result is True 171 | 172 | # binary, string, and raw int all match -- should all be encoded as b'1' 173 | result = client.cas(b"key", b"value", b"1", noreply=False) 174 | assert result is False 175 | result = client.cas(b"key", b"value", "1", noreply=False) 176 | assert result is False 177 | result = client.cas(b"key", b"value", 1, noreply=False) 178 | assert result is False 179 | 180 | result, cas = client.gets(b"key") 181 | assert result == b"value" 182 | 183 | result = client.cas(b"key", b"value1", cas, noreply=False) 184 | assert result is True 185 | 186 | result = client.cas(b"key", b"value2", cas, noreply=False) 187 | assert result is False 188 | 189 | 190 | @pytest.mark.integration() 191 | def test_gets(client_class, host, port, socket_module, key_prefix): 192 | client = client_class( 193 | (host, port), socket_module=socket_module, key_prefix=key_prefix 194 | ) 195 | client.flush_all() 196 | 197 | result = client.gets(b"key") 198 | assert result == (None, None) 199 | 200 | result = client.set(b"key", b"value", noreply=False) 201 | assert result is True 202 | result = client.gets(b"key") 203 | assert result[0] == b"value" 204 | 205 | 206 | @pytest.mark.integration() 207 | def test_delete(client_class, host, port, socket_module, key_prefix): 208 | client = client_class( 209 | (host, port), socket_module=socket_module, key_prefix=key_prefix 210 | ) 211 | client.flush_all() 212 | 213 | result = client.delete(b"key", noreply=False) 214 | assert result is False 215 | 216 | result = client.get(b"key") 217 | assert result is None 218 | result = client.set(b"key", b"value", noreply=False) 219 | assert result is True 220 | result = client.delete(b"key", noreply=False) 221 | assert result is True 222 | result = client.get(b"key") 223 | assert result is None 224 | 225 | 226 | @pytest.mark.integration() 227 | def test_incr_decr(client_class, host, port, socket_module, key_prefix): 228 | client = Client((host, port), socket_module=socket_module, 
key_prefix=key_prefix) 229 | client.flush_all() 230 | 231 | result = client.incr(b"key", 1, noreply=False) 232 | assert result is None 233 | 234 | result = client.set(b"key", b"0", noreply=False) 235 | assert result is True 236 | result = client.incr(b"key", 1, noreply=False) 237 | assert result == 1 238 | 239 | def _bad_int(): 240 | client.incr(b"key", b"foobar") 241 | 242 | with pytest.raises(MemcacheClientError): 243 | _bad_int() 244 | 245 | result = client.decr(b"key1", 1, noreply=False) 246 | assert result is None 247 | 248 | result = client.decr(b"key", 1, noreply=False) 249 | assert result == 0 250 | result = client.get(b"key") 251 | assert result == b"0" 252 | 253 | 254 | @pytest.mark.integration() 255 | def test_touch(client_class, host, port, socket_module, key_prefix): 256 | client = client_class( 257 | (host, port), socket_module=socket_module, key_prefix=key_prefix 258 | ) 259 | client.flush_all() 260 | 261 | result = client.touch(b"key", noreply=False) 262 | assert result is False 263 | 264 | result = client.set(b"key", b"0", 1, noreply=False) 265 | assert result is True 266 | 267 | result = client.touch(b"key", noreply=False) 268 | assert result is True 269 | 270 | result = client.touch(b"key", 1, noreply=False) 271 | assert result is True 272 | 273 | 274 | @pytest.mark.integration() 275 | def test_gat_gats(client_class, host, port, socket_module, key_prefix): 276 | client = client_class( 277 | (host, port), socket_module=socket_module, key_prefix=key_prefix 278 | ) 279 | client.flush_all() 280 | 281 | direct_client = ( 282 | client if hasattr(client, "raw_command") else list(client.clients.values())[0] 283 | ) 284 | 285 | result = client.set(b"key", b"0", 10, noreply=False) 286 | assert result is True 287 | 288 | ttl1 = direct_client.raw_command(b"mg " + key_prefix + b"key t").replace( 289 | b"HD t", b"" 290 | ) 291 | 292 | result = client.gat(b"key", expire=1000) 293 | assert result == b"0" 294 | 295 | result, cas = client.gats(b"key", expire=1000) 296 | assert result == b"0" 297 | 298 | ttl2 = direct_client.raw_command(b"mg " + key_prefix + b"key t").replace( 299 | b"HD t", b"" 300 | ) 301 | 302 | assert int(ttl1) < 950 < int(ttl2) <= 1000 303 | 304 | 305 | @pytest.mark.integration() 306 | def test_misc(client_class, host, port, socket_module, key_prefix): 307 | client = Client((host, port), socket_module=socket_module, key_prefix=key_prefix) 308 | client.flush_all() 309 | 310 | # Ensure no exceptions are thrown 311 | client.stats("cachedump", "1", "1") 312 | 313 | success = client.cache_memlimit(50) 314 | assert success 315 | 316 | 317 | @pytest.mark.integration() 318 | def test_serialization_deserialization(host, port, socket_module): 319 | class JsonSerde: 320 | def serialize(self, key, value): 321 | return json.dumps(value).encode("ascii"), 1 322 | 323 | def deserialize(self, key, value, flags): 324 | if flags == 1: 325 | return json.loads(value.decode("ascii")) 326 | return value 327 | 328 | client = Client((host, port), serde=JsonSerde(), socket_module=socket_module) 329 | client.flush_all() 330 | 331 | value = {"a": "b", "c": ["d"]} 332 | client.set(b"key", value) 333 | result = client.get(b"key") 334 | assert result == value 335 | 336 | 337 | def serde_serialization_helper(client_class, host, port, socket_module, serde): 338 | def check(value): 339 | client.set(b"key", value, noreply=False) 340 | result = client.get(b"key") 341 | assert result == value 342 | assert type(result) is type(value) 343 | 344 | client = client_class((host, port), serde=serde, 
socket_module=socket_module) 345 | client.flush_all() 346 | 347 | check(b"byte string") 348 | check("unicode string") 349 | check("olé") 350 | check("olé") 351 | check(1) 352 | check(123123123123123123123) 353 | check({"a": "pickle"}) 354 | check(["one pickle", "two pickle"]) 355 | testdict = defaultdict(int) 356 | testdict["one pickle"] 357 | testdict[b"two pickle"] 358 | check(testdict) 359 | 360 | 361 | @pytest.mark.integration() 362 | @pytest.mark.parametrize( 363 | "serde", 364 | [ 365 | pickle_serde, 366 | compressed_serde, 367 | ], 368 | ) 369 | def test_serde_serialization(client_class, host, port, socket_module, serde): 370 | serde_serialization_helper(client_class, host, port, socket_module, serde) 371 | 372 | 373 | @pytest.mark.integration() 374 | def test_serde_serialization0(client_class, host, port, socket_module): 375 | serde_serialization_helper( 376 | client_class, host, port, socket_module, PickleSerde(pickle_version=0) 377 | ) 378 | 379 | 380 | @pytest.mark.integration() 381 | def test_serde_serialization2(client_class, host, port, socket_module): 382 | serde_serialization_helper( 383 | client_class, host, port, socket_module, PickleSerde(pickle_version=2) 384 | ) 385 | 386 | 387 | @pytest.mark.integration() 388 | def test_errors(client_class, host, port, socket_module): 389 | client = client_class((host, port), socket_module=socket_module) 390 | client.flush_all() 391 | 392 | def _key_with_ws(): 393 | client.set(b"key with spaces", b"value", noreply=False) 394 | 395 | with pytest.raises(MemcacheIllegalInputError): 396 | _key_with_ws() 397 | 398 | def _key_with_illegal_carriage_return(): 399 | client.set(b"\r\nflush_all", b"value", noreply=False) 400 | 401 | with pytest.raises(MemcacheIllegalInputError): 402 | _key_with_illegal_carriage_return() 403 | 404 | def _key_too_long(): 405 | client.set(b"x" * 1024, b"value", noreply=False) 406 | 407 | with pytest.raises(MemcacheClientError): 408 | _key_too_long() 409 | 410 | def _unicode_key_in_set(): 411 | client.set("\u0FFF", b"value", noreply=False) 412 | 413 | with pytest.raises(MemcacheClientError): 414 | _unicode_key_in_set() 415 | 416 | def _unicode_key_in_get(): 417 | client.get("\u0FFF") 418 | 419 | with pytest.raises(MemcacheClientError): 420 | _unicode_key_in_get() 421 | 422 | def _unicode_value_in_set(): 423 | client.set(b"key", "\u0FFF", noreply=False) 424 | 425 | with pytest.raises(MemcacheClientError): 426 | _unicode_value_in_set() 427 | 428 | 429 | @pytest.mark.skip("https://github.com/pinterest/pymemcache/issues/39") 430 | @pytest.mark.integration() 431 | def test_tls(client_class, tls_host, tls_port, socket_module, tls_context): 432 | client = client_class( 433 | (tls_host, tls_port), socket_module=socket_module, tls_context=tls_context 434 | ) 435 | client.flush_all() 436 | 437 | key = b"key" 438 | value = b"value" 439 | key2 = b"key2" 440 | value2 = b"value2" 441 | get_set_helper(client, key, value, key2, value2) 442 | 443 | 444 | @pytest.mark.integration() 445 | @pytest.mark.parametrize( 446 | "serde,should_fail", 447 | [ 448 | (pickle_serde, True), 449 | (compressed_serde, False), 450 | ], 451 | ) 452 | def test_get_set_large( 453 | client_class, 454 | host, 455 | port, 456 | serde, 457 | socket_module, 458 | should_fail, 459 | ): 460 | client = client_class((host, port), serde=serde, socket_module=socket_module) 461 | client.flush_all() 462 | 463 | key = b"key" 464 | value = b"value" * 1024 * 1024 465 | key2 = b"key2" 466 | value2 = b"value2" * 1024 * 1024 467 | 468 | if should_fail: 469 | with 
pytest.raises(MemcacheServerError): 470 | get_set_helper(client, key, value, key2, value2) 471 | else: 472 | get_set_helper(client, key, value, key2, value2) 473 | -------------------------------------------------------------------------------- /pymemcache/test/test_rendezvous.py: -------------------------------------------------------------------------------- 1 | from pymemcache.client.rendezvous import RendezvousHash 2 | import pytest 3 | 4 | 5 | @pytest.mark.unit() 6 | def test_init_no_options(): 7 | rendezvous = RendezvousHash() 8 | assert 0 == len(rendezvous.nodes) 9 | assert 1361238019 == rendezvous.hash_function("6666") 10 | 11 | 12 | @pytest.mark.unit() 13 | def test_init(): 14 | nodes = ["0", "1", "2"] 15 | rendezvous = RendezvousHash(nodes=nodes) 16 | assert 3 == len(rendezvous.nodes) 17 | assert 1361238019 == rendezvous.hash_function("6666") 18 | 19 | 20 | @pytest.mark.unit() 21 | def test_seed(): 22 | rendezvous = RendezvousHash(seed=10) 23 | assert 2981722772 == rendezvous.hash_function("6666") 24 | 25 | 26 | @pytest.mark.unit() 27 | def test_add_node(): 28 | rendezvous = RendezvousHash() 29 | rendezvous.add_node("1") 30 | 31 | assert 1 == len(rendezvous.nodes) 32 | rendezvous.add_node("1") 33 | 34 | assert 1 == len(rendezvous.nodes) 35 | rendezvous.add_node("2") 36 | 37 | assert 2 == len(rendezvous.nodes) 38 | rendezvous.add_node("1") 39 | 40 | assert 2 == len(rendezvous.nodes) 41 | 42 | 43 | @pytest.mark.unit() 44 | def test_remove_node(): 45 | nodes = ["0", "1", "2"] 46 | rendezvous = RendezvousHash(nodes=nodes) 47 | rendezvous.remove_node("2") 48 | 49 | assert 2 == len(rendezvous.nodes) 50 | 51 | with pytest.raises(ValueError): 52 | rendezvous.remove_node("2") 53 | 54 | assert 2 == len(rendezvous.nodes) 55 | 56 | rendezvous.remove_node("1") 57 | assert 1 == len(rendezvous.nodes) 58 | 59 | rendezvous.remove_node("0") 60 | assert 0 == len(rendezvous.nodes) 61 | 62 | 63 | @pytest.mark.unit() 64 | def test_get_node(): 65 | nodes = ["0", "1", "2"] 66 | rendezvous = RendezvousHash(nodes=nodes) 67 | assert "0" == rendezvous.get_node("ok") 68 | assert "1" == rendezvous.get_node("mykey") 69 | assert "2" == rendezvous.get_node("wat") 70 | 71 | 72 | @pytest.mark.unit() 73 | def test_get_node_after_removal(): 74 | nodes = ["0", "1", "2"] 75 | rendezvous = RendezvousHash(nodes=nodes) 76 | rendezvous.remove_node("1") 77 | 78 | assert "0" == rendezvous.get_node("ok") 79 | assert "0" == rendezvous.get_node("mykey") 80 | assert "2" == rendezvous.get_node("wat") 81 | 82 | 83 | @pytest.mark.unit() 84 | def test_get_node_after_addition(): 85 | nodes = ["0", "1", "2"] 86 | rendezvous = RendezvousHash(nodes=nodes) 87 | assert "0" == rendezvous.get_node("ok") 88 | assert "1" == rendezvous.get_node("mykey") 89 | assert "2" == rendezvous.get_node("wat") 90 | assert "2" == rendezvous.get_node("lol") 91 | rendezvous.add_node("3") 92 | 93 | assert "0" == rendezvous.get_node("ok") 94 | assert "1" == rendezvous.get_node("mykey") 95 | assert "2" == rendezvous.get_node("wat") 96 | assert "3" == rendezvous.get_node("lol") 97 | 98 | 99 | @pytest.mark.unit() 100 | def test_grow(): 101 | rendezvous = RendezvousHash() 102 | 103 | placements = {} 104 | 105 | for i in range(10): 106 | rendezvous.add_node(str(i)) 107 | placements[str(i)] = [] 108 | 109 | for i in range(1000): 110 | node = rendezvous.get_node(str(i)) 111 | placements[node].append(i) 112 | 113 | new_placements = {} 114 | 115 | for i in range(20): 116 | rendezvous.add_node(str(i)) 117 | new_placements[str(i)] = [] 118 | 119 | for i in range(1000): 
120 | node = rendezvous.get_node(str(i)) 121 | new_placements[node].append(i) 122 | 123 | keys = [k for sublist in placements.values() for k in sublist] 124 | new_keys = [k for sublist in new_placements.values() for k in sublist] 125 | assert sorted(keys) == sorted(new_keys) 126 | 127 | added = 0 128 | removed = 0 129 | 130 | for node, assignments in new_placements.items(): 131 | after = set(assignments) 132 | before = set(placements.get(node, [])) 133 | removed += len(before.difference(after)) 134 | added += len(after.difference(before)) 135 | 136 | assert added == removed 137 | assert 1062 == (added + removed) 138 | 139 | 140 | @pytest.mark.unit() 141 | def test_shrink(): 142 | rendezvous = RendezvousHash() 143 | 144 | placements = {} 145 | for i in range(10): 146 | rendezvous.add_node(str(i)) 147 | placements[str(i)] = [] 148 | 149 | for i in range(1000): 150 | node = rendezvous.get_node(str(i)) 151 | placements[node].append(i) 152 | 153 | rendezvous.remove_node("9") 154 | new_placements = {} 155 | for i in range(9): 156 | new_placements[str(i)] = [] 157 | 158 | for i in range(1000): 159 | node = rendezvous.get_node(str(i)) 160 | new_placements[node].append(i) 161 | 162 | keys = [k for sublist in placements.values() for k in sublist] 163 | new_keys = [k for sublist in new_placements.values() for k in sublist] 164 | assert sorted(keys) == sorted(new_keys) 165 | 166 | added = 0 167 | removed = 0 168 | for node, assignments in placements.items(): 169 | after = set(assignments) 170 | before = set(new_placements.get(node, [])) 171 | removed += len(before.difference(after)) 172 | added += len(after.difference(before)) 173 | 174 | assert added == removed 175 | assert 202 == (added + removed) 176 | 177 | 178 | def collide(key, seed): 179 | return 1337 180 | 181 | 182 | @pytest.mark.unit() 183 | def test_rendezvous_collision(): 184 | nodes = ["c", "b", "a"] 185 | rendezvous = RendezvousHash(nodes, hash_function=collide) 186 | 187 | for i in range(1000): 188 | assert "c" == rendezvous.get_node(i) 189 | 190 | 191 | @pytest.mark.unit() 192 | def test_rendezvous_names(): 193 | nodes = [1, 2, 3, "a", "b", "lol.wat.com"] 194 | rendezvous = RendezvousHash(nodes, hash_function=collide) 195 | 196 | for i in range(10): 197 | assert "lol.wat.com" == rendezvous.get_node(i) 198 | 199 | nodes = [1, "a", "0"] 200 | rendezvous = RendezvousHash(nodes, hash_function=collide) 201 | 202 | for i in range(10): 203 | assert "a" == rendezvous.get_node(i) 204 | -------------------------------------------------------------------------------- /pymemcache/test/test_serde.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from pymemcache.serde import ( 4 | CompressedSerde, 5 | pickle_serde, 6 | PickleSerde, 7 | FLAG_BYTES, 8 | FLAG_COMPRESSED, 9 | FLAG_PICKLE, 10 | FLAG_INTEGER, 11 | FLAG_TEXT, 12 | ) 13 | import pytest 14 | import pickle 15 | import zlib 16 | 17 | 18 | class CustomInt(int): 19 | """ 20 | Custom integer type for testing. 21 | 22 | Entirely useless, but used to show that built in types get serialized and 23 | deserialized back as the same type of object. 
24 | """ 25 | 26 | pass 27 | 28 | 29 | def check(serde, value, expected_flags): 30 | serialized, flags = serde.serialize(b"key", value) 31 | assert flags == expected_flags 32 | 33 | # pymemcache stores values as byte strings, so we immediately the value 34 | # if needed so deserialized works as it would with a real server 35 | if not isinstance(serialized, bytes): 36 | serialized = str(serialized).encode("ascii") 37 | 38 | deserialized = serde.deserialize(b"key", serialized, flags) 39 | assert deserialized == value 40 | 41 | 42 | @pytest.mark.unit() 43 | class TestSerde: 44 | serde = pickle_serde 45 | 46 | def test_bytes(self): 47 | check(self.serde, b"value", FLAG_BYTES) 48 | check(self.serde, b"\xc2\xa3 $ \xe2\x82\xac", FLAG_BYTES) # £ $ € 49 | 50 | def test_unicode(self): 51 | check(self.serde, "value", FLAG_TEXT) 52 | check(self.serde, "£ $ €", FLAG_TEXT) 53 | 54 | def test_int(self): 55 | check(self.serde, 1, FLAG_INTEGER) 56 | 57 | def test_pickleable(self): 58 | check(self.serde, {"a": "dict"}, FLAG_PICKLE) 59 | 60 | def test_subtype(self): 61 | # Subclass of a native type will be restored as the same type 62 | check(self.serde, CustomInt(123123), FLAG_PICKLE) 63 | 64 | 65 | @pytest.mark.unit() 66 | class TestSerdePickleVersion0(TestCase): 67 | serde = PickleSerde(pickle_version=0) 68 | 69 | 70 | @pytest.mark.unit() 71 | class TestSerdePickleVersion1(TestCase): 72 | serde = PickleSerde(pickle_version=1) 73 | 74 | 75 | @pytest.mark.unit() 76 | class TestSerdePickleVersion2(TestCase): 77 | serde = PickleSerde(pickle_version=2) 78 | 79 | 80 | @pytest.mark.unit() 81 | class TestSerdePickleVersionHighest(TestCase): 82 | serde = PickleSerde(pickle_version=pickle.HIGHEST_PROTOCOL) 83 | 84 | 85 | @pytest.mark.parametrize("serde", [pickle_serde, CompressedSerde()]) 86 | @pytest.mark.unit() 87 | def test_compressed_simple(serde): 88 | # test_bytes 89 | check(serde, b"value", FLAG_BYTES) 90 | check(serde, b"\xc2\xa3 $ \xe2\x82\xac", FLAG_BYTES) # £ $ € 91 | 92 | # test_unicode 93 | check(serde, "value", FLAG_TEXT) 94 | check(serde, "£ $ €", FLAG_TEXT) 95 | 96 | # test_int 97 | check(serde, 1, FLAG_INTEGER) 98 | 99 | # test_pickleable 100 | check(serde, {"a": "dict"}, FLAG_PICKLE) 101 | 102 | # test_subtype 103 | # Subclass of a native type will be restored as the same type 104 | check(serde, CustomInt(12312), FLAG_PICKLE) 105 | 106 | 107 | @pytest.mark.parametrize( 108 | "serde", 109 | [ 110 | CompressedSerde(min_compress_len=49), 111 | # Custom compression. 
This could be something like lz4 112 | CompressedSerde( 113 | compress=lambda value: zlib.compress(value, 9), 114 | decompress=lambda value: zlib.decompress(value), 115 | min_compress_len=49, 116 | ), 117 | ], 118 | ) 119 | @pytest.mark.unit() 120 | def test_compressed_complex(serde): 121 | # test_bytes 122 | check(serde, b"value" * 10, FLAG_BYTES | FLAG_COMPRESSED) 123 | check(serde, b"\xc2\xa3 $ \xe2\x82\xac" * 10, FLAG_BYTES | FLAG_COMPRESSED) # £ $ € 124 | 125 | # test_unicode 126 | check(serde, "value" * 10, FLAG_TEXT | FLAG_COMPRESSED) 127 | check(serde, "£ $ €" * 10, FLAG_TEXT | FLAG_COMPRESSED) 128 | 129 | # test_int, doesn't make sense to compress 130 | check(serde, 9223372036854775807, FLAG_INTEGER) 131 | 132 | # test_pickleable 133 | check( 134 | serde, 135 | { 136 | "foo": "bar", 137 | "baz": "qux", 138 | "uno": "dos", 139 | "tres": "tres", 140 | }, 141 | FLAG_PICKLE | FLAG_COMPRESSED, 142 | ) 143 | 144 | # test_subtype 145 | # Subclass of a native type will be restored as the same type 146 | check(serde, CustomInt(9223372036854775807), FLAG_PICKLE | FLAG_COMPRESSED) 147 | -------------------------------------------------------------------------------- /pymemcache/test/test_utils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pymemcache.serde import pickle_serde 4 | from pymemcache.test.utils import MockMemcacheClient 5 | 6 | 7 | @pytest.mark.unit() 8 | def test_get_set(): 9 | client = MockMemcacheClient() 10 | assert client.get(b"hello") is None 11 | 12 | client.set(b"hello", 12) 13 | assert client.get(b"hello") == 12 14 | 15 | 16 | @pytest.mark.unit() 17 | def test_get_set_string(): 18 | client = MockMemcacheClient() 19 | assert client.get("hello") is None 20 | 21 | value = "world" 22 | client.set("hello", value) 23 | assert client.get("hello") == b"world" 24 | 25 | 26 | @pytest.mark.unit() 27 | def test_get_set_string_with_serde(): 28 | client = MockMemcacheClient(serde=pickle_serde) 29 | assert client.get("hello") is None 30 | 31 | value = "world" 32 | client.set("hello", value) 33 | assert client.get("hello") == value 34 | 35 | 36 | @pytest.mark.unit() 37 | def test_get_set_unicide_key(): 38 | client = MockMemcacheClient() 39 | assert client.get("hello") is None 40 | 41 | client.set(b"hello", 12) 42 | assert client.get("hello") == 12 43 | 44 | 45 | @pytest.mark.unit() 46 | def test_get_set_non_ascii_value(): 47 | client = MockMemcacheClient() 48 | assert client.get(b"hello") is None 49 | 50 | # This is the value of msgpack.packb('non_ascii') 51 | non_ascii_str = b"\xa9non_ascii" 52 | client.set(b"hello", non_ascii_str) 53 | assert client.get(b"hello") == non_ascii_str 54 | 55 | 56 | @pytest.mark.unit() 57 | def test_get_many_set_many(): 58 | client = MockMemcacheClient() 59 | client.set(b"h", 1) 60 | 61 | result = client.get_many([b"h", b"e", b"l", b"o"]) 62 | assert result == {b"h": 1} 63 | 64 | # Convert keys into bytes 65 | d = {k.encode("ascii"): v for k, v in dict(h=1, e=2, z=3).items()} 66 | client.set_many(d) 67 | assert client.get_many([b"h", b"e", b"z", b"o"]) == d 68 | 69 | 70 | @pytest.mark.unit() 71 | def test_get_many_set_many_non_ascii_values(): 72 | client = MockMemcacheClient() 73 | 74 | # These are the values of calling msgpack.packb() on '1', '2', and '3' 75 | non_ascii_1 = b"\xa11" 76 | non_ascii_2 = b"\xa12" 77 | non_ascii_3 = b"\xa13" 78 | client.set(b"h", non_ascii_1) 79 | 80 | result = client.get_many([b"h", b"e", b"l", b"o"]) 81 | assert result == {b"h": non_ascii_1} 82 | 83 | # 
Convert keys into bytes 84 | d = { 85 | k.encode("ascii"): v 86 | for k, v in dict(h=non_ascii_1, e=non_ascii_2, z=non_ascii_3).items() 87 | } 88 | client.set_many(d) 89 | assert client.get_many([b"h", b"e", b"z", b"o"]) == d 90 | 91 | 92 | @pytest.mark.unit() 93 | def test_add(): 94 | client = MockMemcacheClient() 95 | 96 | client.add(b"k", 2) 97 | assert client.get(b"k") == 2 98 | 99 | client.add(b"k", 25) 100 | assert client.get(b"k") == 2 101 | 102 | 103 | @pytest.mark.unit() 104 | def test_delete(): 105 | client = MockMemcacheClient() 106 | 107 | client.add(b"k", 2) 108 | assert client.get(b"k") == 2 109 | 110 | client.delete(b"k") 111 | assert client.get(b"k") is None 112 | 113 | 114 | @pytest.mark.unit() 115 | def test_incr_decr(): 116 | client = MockMemcacheClient() 117 | 118 | client.add(b"k", 2) 119 | 120 | client.incr(b"k", 4) 121 | assert client.get(b"k") == 6 122 | 123 | client.decr(b"k", 2) 124 | assert client.get(b"k") == 4 125 | 126 | 127 | @pytest.mark.unit() 128 | def test_prepand_append(): 129 | client = MockMemcacheClient() 130 | 131 | client.set(b"k", "1") 132 | client.append(b"k", "a") 133 | client.prepend(b"k", "p") 134 | assert client.get(b"k") == b"p1a" 135 | -------------------------------------------------------------------------------- /pymemcache/test/utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Useful testing utilities. 3 | 4 | This module is considered public API. 5 | 6 | """ 7 | 8 | import time 9 | 10 | import socket 11 | 12 | from pymemcache.exceptions import MemcacheClientError, MemcacheIllegalInputError 13 | from pymemcache.serde import LegacyWrappingSerde 14 | from pymemcache.client.base import check_key_helper 15 | 16 | 17 | class MockMemcacheClient: 18 | """ 19 | A (partial) in-memory mock for Clients. 
20 | 21 | """ 22 | 23 | def __init__( 24 | self, 25 | server=None, 26 | serde=None, 27 | serializer=None, 28 | deserializer=None, 29 | connect_timeout=None, 30 | timeout=None, 31 | no_delay=False, 32 | ignore_exc=False, 33 | socket_module=None, 34 | default_noreply=True, 35 | allow_unicode_keys=False, 36 | encoding="ascii", 37 | tls_context=None, 38 | **kwargs, 39 | ): 40 | self._contents = {} 41 | 42 | def _serializer(key, value): 43 | if isinstance(value, str): 44 | value = value.encode() 45 | return value, 0 46 | 47 | if serializer is None: 48 | serializer = _serializer 49 | 50 | self.serde = serde or LegacyWrappingSerde(serializer, deserializer) 51 | self.allow_unicode_keys = allow_unicode_keys 52 | 53 | # Unused, but present for interface compatibility 54 | self.server = server 55 | self.connect_timeout = connect_timeout 56 | self.timeout = timeout 57 | self.no_delay = no_delay 58 | self.ignore_exc = ignore_exc 59 | self.socket_module = socket 60 | self.sock = None 61 | self.encoding = encoding 62 | self.tls_context = tls_context 63 | 64 | def check_key(self, key): 65 | """Checks key and add key_prefix.""" 66 | return check_key_helper(key, allow_unicode_keys=self.allow_unicode_keys) 67 | 68 | def clear(self): 69 | """Method used to clear/reset mock cache""" 70 | self._contents.clear() 71 | 72 | def get(self, key, default=None): 73 | key = self.check_key(key) 74 | 75 | if key not in self._contents: 76 | return default 77 | 78 | expire, value, flags = self._contents[key] 79 | if expire and expire < time.time(): 80 | del self._contents[key] 81 | return default 82 | 83 | return self.serde.deserialize(key, value, flags) 84 | 85 | def get_many(self, keys): 86 | out = {} 87 | for key in keys: 88 | value = self.get(key) 89 | if value is not None: 90 | out[key] = value 91 | return out 92 | 93 | get_multi = get_many 94 | 95 | def set(self, key, value, expire=0, noreply=True, flags=None): 96 | key = self.check_key(key) 97 | if isinstance(value, str) and not isinstance(value, bytes): 98 | try: 99 | value.encode(self.encoding) 100 | except (UnicodeEncodeError, UnicodeDecodeError): 101 | raise MemcacheIllegalInputError 102 | 103 | value, flags = self.serde.serialize(key, value) 104 | 105 | if expire: 106 | expire += time.time() 107 | 108 | self._contents[key] = expire, value, flags 109 | return True 110 | 111 | def set_many(self, values, expire=0, noreply=True, flags=None): 112 | result = [] 113 | for key, value in values.items(): 114 | ret = self.set(key, value, expire, noreply, flags=flags) 115 | if not ret: 116 | result.append(key) 117 | return [] if noreply else result 118 | 119 | set_multi = set_many 120 | 121 | def incr(self, key, value, noreply=False): 122 | current = self.get(key) 123 | present = current is not None 124 | if present: 125 | self.set(key, current + value, noreply=noreply) 126 | return None if noreply or not present else current + value 127 | 128 | def decr(self, key, value, noreply=False): 129 | current = self.get(key) 130 | present = current is not None 131 | if present: 132 | self.set(key, current - value, noreply=noreply) 133 | return None if noreply or not present else current - value 134 | 135 | def add(self, key, value, expire=0, noreply=True, flags=None): 136 | current = self.get(key) 137 | present = current is not None 138 | if not present: 139 | self.set(key, value, expire, noreply, flags=flags) 140 | return noreply or not present 141 | 142 | def delete(self, key, noreply=True): 143 | key = self.check_key(key) 144 | current = self._contents.pop(key, None) 145 | 
present = current is not None 146 | return noreply or present 147 | 148 | def delete_many(self, keys, noreply=True): 149 | for key in keys: 150 | self.delete(key, noreply) 151 | return True 152 | 153 | def prepend(self, key, value, expire=0, noreply=True, flags=None): 154 | current = self.get(key) 155 | if current is not None: 156 | if isinstance(value, str) and not isinstance(value, bytes): 157 | try: 158 | value = value.encode(self.encoding) 159 | except (UnicodeEncodeError, UnicodeDecodeError): 160 | raise MemcacheIllegalInputError 161 | self.set(key, value + current, expire, noreply, flags=flags) 162 | return True 163 | 164 | def append(self, key, value, expire=0, noreply=True, flags=None): 165 | current = self.get(key) 166 | if current is not None: 167 | if isinstance(value, str) and not isinstance(value, bytes): 168 | try: 169 | value = value.encode(self.encoding) 170 | except (UnicodeEncodeError, UnicodeDecodeError): 171 | raise MemcacheIllegalInputError 172 | self.set(key, current + value, expire, noreply, flags=flags) 173 | return True 174 | 175 | delete_multi = delete_many 176 | 177 | def stats(self, *_args): 178 | # I make no claim that these values make any sense, but the format 179 | # of the output is the same as for pymemcache.client.Client.stats() 180 | return { 181 | "version": "MockMemcacheClient", 182 | "rusage_user": 1.0, 183 | "rusage_system": 1.0, 184 | "hash_is_expanding": False, 185 | "slab_reassign_running": False, 186 | "inter": "in-memory", 187 | "evictions": False, 188 | "growth_factor": 1.0, 189 | "stat_key_prefix": "", 190 | "umask": 0o644, 191 | "detail_enabled": False, 192 | "cas_enabled": False, 193 | "auth_enabled_sasl": False, 194 | "maxconns_fast": False, 195 | "slab_reassign": False, 196 | "slab_automove": False, 197 | } 198 | 199 | def replace(self, key, value, expire=0, noreply=True, flags=None): 200 | current = self.get(key) 201 | present = current is not None 202 | if present: 203 | self.set(key, value, expire, noreply, flags=flags) 204 | return noreply or present 205 | 206 | def cas(self, key, value, cas, expire=0, noreply=False, flags=None): 207 | raise MemcacheClientError("CAS is not enabled for this instance") 208 | 209 | def touch(self, key, expire=0, noreply=True): 210 | current = self.get(key) 211 | present = current is not None 212 | if present: 213 | self.set(key, current, expire, noreply=noreply) 214 | return True if noreply or present else False 215 | 216 | def cache_memlimit(self, memlimit): 217 | return True 218 | 219 | def version(self): 220 | return "MockMemcacheClient" 221 | 222 | def flush_all(self, delay=0, noreply=True): 223 | self.clear() 224 | 225 | return noreply or self._contents == {} 226 | 227 | def quit(self): 228 | pass 229 | 230 | def close(self): 231 | pass 232 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | target-version = ['py39', 'py310', 'py311', 'py312', 'py313'] 3 | 4 | [tool.mypy] 5 | python_version = 3.9 6 | ignore_missing_imports = true 7 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = pymemcache 3 | version = attr: pymemcache.__version__ 4 | author = Jon Parise 5 | author_email = jon@pinterest.com 6 | description = A comprehensive, fast, pure Python memcached client 7 | long_description = file: README.rst, 
ChangeLog.rst 8 | long_description_content_type = text/x-rst 9 | license = Apache License 2.0 10 | project_urls = 11 | Documentation = https://pymemcache.readthedocs.io/ 12 | Source = https://github.com/pinterest/pymemcache 13 | Issue Tracker = https://github.com/pinterest/pymemcache/issues 14 | keywords = memcache, client, database 15 | classifiers = 16 | Programming Language :: Python 17 | Programming Language :: Python :: 3 :: Only 18 | Programming Language :: Python :: 3.9 19 | Programming Language :: Python :: 3.10 20 | Programming Language :: Python :: 3.11 21 | Programming Language :: Python :: 3.12 22 | Programming Language :: Python :: 3.13 23 | Programming Language :: Python :: Implementation :: PyPy 24 | License :: OSI Approved :: Apache Software License 25 | Topic :: Database 26 | 27 | [options] 28 | python_requires = >= 3.9 29 | 30 | [bdist_wheel] 31 | universal = true 32 | 33 | [coverage:run] 34 | omit = pymemcache/test/* 35 | 36 | [tool:pytest] 37 | norecursedirs = build docs/_build *.egg .tox *.venv 38 | addopts = 39 | --verbose 40 | --tb=short 41 | --capture=no 42 | -rfEsxX 43 | --cov=pymemcache --cov-config=setup.cfg --cov-report=xml --cov-report=term-missing 44 | -m unit 45 | markers = 46 | unit 47 | integration 48 | benchmark 49 | testpaths = 50 | pymemcache/test 51 | 52 | [flake8] 53 | show-source = True 54 | exclude = .eggs/*,.tox/*,.venv/*,docs/* 55 | extend-ignore = E203, E501 56 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup 4 | 5 | setup() 6 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | Faker==26.1.0 2 | pytest==8.3.4 3 | pytest-cov==4.0.0 4 | gevent==24.2.1; "PyPy" not in platform_python_implementation 5 | pylibmc==1.6.3; sys.platform != 'win32' 6 | python-memcached==1.62 7 | zstd==1.5.4.0 8 | setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability 9 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py39, 4 | py310, 5 | py311, 6 | py312, 7 | py313, 8 | pypy3, 9 | docs, 10 | lint, 11 | mypy, 12 | venv, 13 | skip_missing_interpreters = true 14 | 15 | [gh-actions] 16 | python = 17 | pypy-3.10: pypy3 18 | 19 | [testenv] 20 | description = run tests with {basepython} 21 | deps = -r{toxinidir}/test-requirements.txt 22 | skip_install = True 23 | commands = 24 | python -m pytest {posargs} 25 | 26 | [testenv:lint] 27 | description = lint source code 28 | deps = -r{toxinidir}/lint-requirements.txt 29 | commands = 30 | python setup.py check --metadata --restructuredtext --strict 31 | flake8 32 | black --check . 33 | 34 | [testenv:mypy] 35 | description = type check source code 36 | deps = -r{toxinidir}/mypy-requirements.txt 37 | commands = 38 | python -m mypy -p pymemcache 39 | 40 | [testenv:docs] 41 | description = invoke sphinx-build to build the HTML docs 42 | deps = -r{toxinidir}/docs-requirements.txt 43 | commands = 44 | sphinx-build -b html docs/ docs/_build/html 45 | 46 | [testenv:venv] 47 | commands = {posargs} 48 | --------------------------------------------------------------------------------
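The CompressedSerde cases in test_serde.py above parametrize over custom compress/decompress callables (the comment suggests "something like lz4"). As a minimal sketch that is not part of the repository, the same hooks can be handed to a regular Client; zlib stands in for any faster codec, and the "localhost", 11211 address is only an assumption about where a memcached server is listening:

import zlib

from pymemcache.client.base import Client
from pymemcache.serde import CompressedSerde

# Values longer than min_compress_len bytes are compressed before storage;
# shorter values (and integers) are stored as-is, as the tests above expect.
serde = CompressedSerde(
    compress=lambda value: zlib.compress(value, 9),
    decompress=zlib.decompress,
    min_compress_len=400,
)

client = Client(("localhost", 11211), serde=serde)  # assumed local memcached
client.set(b"key", b"value" * 1024)
assert client.get(b"key") == b"value" * 1024

Any pair of callables with the shape compress(bytes) -> bytes and decompress(bytes) -> bytes can be substituted for the zlib lambdas above.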