├── .github └── workflows │ ├── commit_message_validation.yml │ ├── release.yml │ ├── stale_issues.yml │ └── tests.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── conftest.py ├── docker-compose.yml ├── pyproject.toml ├── redis_cache └── __init__.py ├── setup.cfg ├── setup.py ├── test.sh └── tests └── test_redis_cache.py /.github/workflows/commit_message_validation.yml: -------------------------------------------------------------------------------- 1 | name: Commit message validation 2 | 3 | on: pull_request 4 | 5 | jobs: 6 | commit-message-validation: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - name: Checkout 11 | uses: actions/checkout@v2 12 | with: 13 | fetch-depth: 0 14 | 15 | - name: Commit message validation 16 | uses: taylorhakes/commit-message-validator@master 17 | env: 18 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 19 | with: 20 | no_jira: true -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Semantic Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | release: 10 | runs-on: ubuntu-latest 11 | concurrency: release 12 | permissions: 13 | id-token: write 14 | contents: write 15 | environment: production 16 | steps: 17 | - uses: actions/checkout@v2 18 | with: 19 | fetch-depth: 0 20 | token: ${{ secrets.GH_TOKEN }} 21 | - name: Python Semantic Release 22 | id: semantic_release 23 | uses: taylorhakes/python-semantic-release@master 24 | with: 25 | github_token: ${{ secrets.GH_TOKEN }} 26 | - uses: actions/setup-python@v4 27 | with: 28 | python-version: "3.x" 29 | - name: deps 30 | run: python -m pip install -U build 31 | - name: build 32 | run: python -m build 33 | - name: Publish distribution 📦 to PyPI 34 | uses: pypa/gh-action-pypi-publish@release/v1 35 | if: steps.semantic_release.outputs.released == 'true' 
-------------------------------------------------------------------------------- /.github/workflows/stale_issues.yml: -------------------------------------------------------------------------------- 1 | name: Close inactive issues 2 | on: 3 | schedule: 4 | - cron: "30 1 */10 * *" 5 | 6 | jobs: 7 | close-issues: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/stale@v3 14 | with: 15 | days-before-issue-stale: 30 16 | days-before-issue-close: 14 17 | stale-issue-label: "stale" 18 | stale-issue-message: "This issue is stale because it has been open for 30 days with no activity." 19 | close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale." 20 | days-before-pr-stale: -1 21 | days-before-pr-close: -1 22 | repo-token: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | schedule: 5 | - cron: '0 1 * * *' 6 | push: 7 | branches: [ master ] 8 | pull_request: 9 | branches: [ master ] 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | python-version: ['3.8', '3.9', '3.10'] 17 | redis-version: ['5', '6', 'latest'] 18 | 19 | steps: 20 | - uses: actions/checkout@v2 21 | - name: Run tests 22 | env: 23 | PY_VERSION: ${{ matrix.python-version }} 24 | REDIS_VERSION: ${{ matrix.redis-version }} 25 | run: | 26 | docker compose run --rm test 27 | 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | 
dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | 4 | 5 | ## v4.0.2 (2025-03-25) 6 | 7 | ### Fix 8 | 9 | * fix(lua): Fix time parsing in limit cache ([`b7d0605`](https://github.com/taylorhakes/python-redis-cache/commit/b7d060521b673a49a90bd79c8c67d8cc4c2df34f)) 10 | 11 | 12 | ## v4.0.1 (2024-08-27) 13 | 14 | ### Build 15 | 16 | * build(fix): Change 
docker-compose to docker compose ([`e701a9e`](https://github.com/taylorhakes/python-redis-cache/commit/e701a9e482a13221535e9c404258f6af7903df08)) 17 | 18 | ### Documentation 19 | 20 | * docs: updated wording on instance docs ([`ca65f4e`](https://github.com/taylorhakes/python-redis-cache/commit/ca65f4e515aa45c27b88972d76c416bdaf9d3ed3)) 21 | 22 | * docs: add class/instance method documentation ([`027344d`](https://github.com/taylorhakes/python-redis-cache/commit/027344d3a62655a82b8be8df25f93a400b217c2d)) 23 | 24 | ### Fix 25 | 26 | * fix(cache): support for unpassed default args (#39) 27 | 28 | * feat(cache): support caching functions with positional-only arguments 29 | 30 | BREAKING CHANGE: requires Python version >= 3.8 31 | 32 | * fix(cache): cache params with defaults not passed as args 33 | 34 | --------- 35 | 36 | Co-authored-by: Taylor Hakes <taylorhakes@users.noreply.github.com> ([`4432b14`](https://github.com/taylorhakes/python-redis-cache/commit/4432b14ffbaaee0aa7142b694c853d518671bb88)) 37 | 38 | 39 | ## v4.0.0 (2024-06-17) 40 | 41 | ### Breaking 42 | 43 | * feat(cache): support caching functions with positional-only arguments 44 | 45 | BREAKING CHANGE: requires Python version >= 3.8 ([`68f00ab`](https://github.com/taylorhakes/python-redis-cache/commit/68f00abeff87672b2ae4a04106ef7fc91f6ce86d)) 46 | 47 | 48 | ## v3.2.0 (2024-05-02) 49 | 50 | ### Feature 51 | 52 | * feat(active-flag): added the ability to disable the cache ([`1d27b9a`](https://github.com/taylorhakes/python-redis-cache/commit/1d27b9a9b1c51664b48986e63057cebe63000e39)) 53 | 54 | 55 | ## v3.1.0 (2024-03-21) 56 | 57 | ### Feature 58 | 59 | * feat(cluster): add ability to disable braces by not supporting cluster ([`939f046`](https://github.com/taylorhakes/python-redis-cache/commit/939f046922be7421ff39d4a29fe5c3be631a1179)) 60 | 61 | 62 | ## v3.0.2 (2024-03-21) 63 | 64 | ### Fix 65 | 66 | * fix(build): add redis to install_requires (#33) 
([`3a21ccd`](https://github.com/taylorhakes/python-redis-cache/commit/3a21ccd908993ffe169f67d636ec9e41a924016f)) 67 | 68 | 69 | ## v3.0.1 (2024-01-10) 70 | 71 | ### Fix 72 | 73 | * fix(keys): update keys to be in same key space (#30) ([`8b03459`](https://github.com/taylorhakes/python-redis-cache/commit/8b03459a253bee5fd6cd1aadd14aa0d5f1e78198)) 74 | 75 | 76 | ## v3.0.0 (2024-01-09) 77 | 78 | ### Breaking 79 | 80 | * feat(keys): keep keys sorted and fix key format (#28) 81 | 82 | BREAKING CHANGE: key format fix ([`aa554ee`](https://github.com/taylorhakes/python-redis-cache/commit/aa554ee10cbb8660844d841cc912947136cd3623)) 83 | 84 | 85 | ## v2.2.5 (2023-09-17) 86 | 87 | ### Fix 88 | 89 | * fix(build): update release order ([`db88117`](https://github.com/taylorhakes/python-redis-cache/commit/db88117f060fb6fc0515cbf2ee3485377d5e95a8)) 90 | 91 | 92 | ## v2.2.4 (2023-09-16) 93 | 94 | ### Fix 95 | 96 | * fix(release): fix version change ([`d434cb6`](https://github.com/taylorhakes/python-redis-cache/commit/d434cb6dacae29022eb7176f59777225f8b2c782)) 97 | 98 | 99 | ## v2.2.3 (2023-09-16) 100 | 101 | ### Fix 102 | 103 | * fix(build): fetch depth ([`18fd8af`](https://github.com/taylorhakes/python-redis-cache/commit/18fd8af81759c4e94b772bd8360b42c107da786c)) 104 | 105 | 106 | ## v2.2.2 (2023-09-16) 107 | 108 | ### Fix 109 | 110 | * fix(release): fix missing permission ([`d4136e1`](https://github.com/taylorhakes/python-redis-cache/commit/d4136e1c6768fd7b635186aca0cd4b1f7011470b)) 111 | 112 | 113 | ## v2.2.1 (2023-09-16) 114 | 115 | ### Documentation 116 | 117 | * docs: remove version badge ([`f0810d2`](https://github.com/taylorhakes/python-redis-cache/commit/f0810d2ca42325592aa4b98c01608f022fcd900d)) 118 | 119 | * docs: update readme badge ([`bdbe5e1`](https://github.com/taylorhakes/python-redis-cache/commit/bdbe5e17ccd3926c58ffd69b58b6775ca71bbc56)) 120 | 121 | * docs(readme): add exception_handler into signature 
([`8bb9193`](https://github.com/taylorhakes/python-redis-cache/commit/8bb9193014c2b556b547871aeb572ea3ec447505)) 122 | 123 | * docs(readme): add note about decode_responses ([`9471fb0`](https://github.com/taylorhakes/python-redis-cache/commit/9471fb0b660820cbf7a3a00b3ce8e75d7a50a79b)) 124 | 125 | * docs(readme): update docs about exception_handler parameter ([`70d48c1`](https://github.com/taylorhakes/python-redis-cache/commit/70d48c1cf89505915972fae3bae57e8b07945e4c)) 126 | 127 | ### Fix 128 | 129 | * fix(build): Permissions ([`0e45716`](https://github.com/taylorhakes/python-redis-cache/commit/0e457165b2d7b48178ae3c2fed521e507ef6ae51)) 130 | 131 | * fix(release): add pypi distribution ([`508f5a8`](https://github.com/taylorhakes/python-redis-cache/commit/508f5a87c73adf90da337ce9aad7791d33f24742)) 132 | 133 | 134 | ## v2.2.0 (2023-07-25) 135 | 136 | ### Feature 137 | 138 | * feat(exceptions): add ability to handle redis exceptions ([`dad5ac0`](https://github.com/taylorhakes/python-redis-cache/commit/dad5ac03253f7b3fd5b7bf725f549beebda09f09)) 139 | 140 | ### Test 141 | 142 | * test: update python versions in tests ([`3e4afdb`](https://github.com/taylorhakes/python-redis-cache/commit/3e4afdb976e99c56d9e1f660b23f024d77ed3406)) 143 | 144 | * test: bump redis from 3.5.3 to 4.4.4 (#22) 145 | 146 | Bumps [redis](https://github.com/redis/redis-py) from 3.5.3 to 4.4.4. 147 | - [Release notes](https://github.com/redis/redis-py/releases) 148 | - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) 149 | - [Commits](https://github.com/redis/redis-py/compare/3.5.3...v4.4.4) 150 | 151 | --- 152 | updated-dependencies: 153 | - dependency-name: redis 154 | dependency-type: direct:development 155 | ... 
156 | 157 | Signed-off-by: dependabot[bot] <support@github.com> 158 | Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> ([`3c3674f`](https://github.com/taylorhakes/python-redis-cache/commit/3c3674f44983f2a95f401cec85f7aeabd09750c6)) 159 | 160 | * test: change name of test ([`0ee3e81`](https://github.com/taylorhakes/python-redis-cache/commit/0ee3e818eb64793699517eba29b468cc221fddeb)) 161 | 162 | 163 | ## v2.1.2 (2023-05-15) 164 | 165 | ### Fix 166 | 167 | * fix(perf): update list to set ([`a4984bb`](https://github.com/taylorhakes/python-redis-cache/commit/a4984bb980ed40bb7c0cf077a69ece823becf3e2)) 168 | 169 | 170 | ## v2.1.1 (2023-05-15) 171 | 172 | ### Fix 173 | 174 | * fix(cache): Fix bug in variable args ([`3dce0e7`](https://github.com/taylorhakes/python-redis-cache/commit/3dce0e774ee00b914bbbf5aaac6b5cce3589968f)) 175 | 176 | ### Test 177 | 178 | * test: add test for args, kwargs mix ([`68f09ba`](https://github.com/taylorhakes/python-redis-cache/commit/68f09bab88bca27a3565c1360cb75c464ddc7d31)) 179 | 180 | 181 | ## v2.1.0 (2023-05-11) 182 | 183 | ### Documentation 184 | 185 | * docs(readme): update docs to specify Python 3.7 ([`145693d`](https://github.com/taylorhakes/python-redis-cache/commit/145693d07b83ac1ecc046841c8a82aba2ec6e6f4)) 186 | 187 | ### Feature 188 | 189 | * feat: fix caching to work on cluster ([`69c1377`](https://github.com/taylorhakes/python-redis-cache/commit/69c13775aeec21e8994228d61e22482a0c12a324)) 190 | 191 | 192 | ## v2.0.0 (2023-01-12) 193 | 194 | ### Breaking 195 | 196 | * fix: update key names to prevent bug 197 | 198 | BREAKING CHANGE: changes key algorithm to avoid bug with name conflict 199 | 200 | Use `copy_old_keys` to migrate data. 
([`c9efd2d`](https://github.com/taylorhakes/python-redis-cache/commit/c9efd2de7247fe4974bd6e29e790b3011e9b9581)) 201 | 202 | ### Build 203 | 204 | * build(actions): add commit message validation on PRs ([`f2f8f35`](https://github.com/taylorhakes/python-redis-cache/commit/f2f8f356cdae1b7fea085e7f8dcc04a2e541b28d)) 205 | 206 | * build(workflow): update cron to run once every 10 days ([`f8c086d`](https://github.com/taylorhakes/python-redis-cache/commit/f8c086d7e4b8e9579cca1d1e1bf82ff5d353f8e2)) 207 | 208 | * build(workflow): add stale github issue closer ([`90b9358`](https://github.com/taylorhakes/python-redis-cache/commit/90b9358c353b8cde6ab26146def9a0e03c2c86a8)) 209 | 210 | ### Unknown 211 | 212 | * doc(README): update readme mget ([`1401856`](https://github.com/taylorhakes/python-redis-cache/commit/14018568b4cc80cf22584ce22313e6d1294e3c89)) 213 | 214 | * Merge pull request #9 from AdrianDeAnda/python-310 215 | 216 | Add Python 3.10 tests to Github Actions ([`1c4e6c1`](https://github.com/taylorhakes/python-redis-cache/commit/1c4e6c1e9b62e6978f8e82169dfefd228d841621)) 217 | 218 | * Add Python 3.10 tests to Github Actions ([`fc8dbb5`](https://github.com/taylorhakes/python-redis-cache/commit/fc8dbb56b694e36f44eed3672795eea1469a0ba4)) 219 | 220 | 221 | ## v1.2.0 (2021-10-22) 222 | 223 | ### Feature 224 | 225 | * feat: key_serializer for custom serialization ([`1d4d1eb`](https://github.com/taylorhakes/python-redis-cache/commit/1d4d1eb24e2830a2c0cee509d65c9e9b772f08ff)) 226 | 227 | 228 | ## v1.1.2 (2021-09-02) 229 | 230 | ### Fix 231 | 232 | * fix: add newline to pyproject.toml ([`d6677a4`](https://github.com/taylorhakes/python-redis-cache/commit/d6677a4eb3890f384c4d7aeef9b1ebe8593636e1)) 233 | 234 | ### Unknown 235 | 236 | * bugfix: Add missing pyproject.toml ([`fd89d46`](https://github.com/taylorhakes/python-redis-cache/commit/fd89d462c71d9988df22f4523d1be892a5f87eaf)) 237 | 238 | * bugfix: correct release.yml format 
([`7b37dfd`](https://github.com/taylorhakes/python-redis-cache/commit/7b37dfd800137006c6b72cff38224327b73303d9)) 239 | 240 | * bugfix: add release.yml ([`0df737c`](https://github.com/taylorhakes/python-redis-cache/commit/0df737c315f60aae5473769e27910ae62f7f20da)) 241 | 242 | * Change python_requires to >3.6 ([`a6ca0f1`](https://github.com/taylorhakes/python-redis-cache/commit/a6ca0f1871636c5f1335e01a2335dcf21499373e)) 243 | 244 | * Merge pull request #6 from AdrianDeAnda/python-39 245 | 246 | Add support for Python 3.9 ([`3f5334f`](https://github.com/taylorhakes/python-redis-cache/commit/3f5334f1d599ac88d144c480b5717c653d784b95)) 247 | 248 | * Add support for Python 3.9 ([`ce1768d`](https://github.com/taylorhakes/python-redis-cache/commit/ce1768dba1331b013366389b39aeeb3b1017b229)) 249 | 250 | * Commit new version ([`0f7d0d4`](https://github.com/taylorhakes/python-redis-cache/commit/0f7d0d4d40dacac75b98701ee0c9e0dec7a73210)) 251 | 252 | * Merge pull request #3 from lfvilella/master 253 | 254 | Fixing invalidate_all ([`68132cb`](https://github.com/taylorhakes/python-redis-cache/commit/68132cbdca69feb17f9472fd8cf7b06c1beec892)) 255 | 256 | * fix review ([`e3a0f16`](https://github.com/taylorhakes/python-redis-cache/commit/e3a0f16d4a10501cf88d8a3bc721b51f1f079dc6)) 257 | 258 | * Code review changes 259 | 260 | 1) test in different redis versions 261 | 2) fixing invalidate_all ([`5335b70`](https://github.com/taylorhakes/python-redis-cache/commit/5335b700626cd315323df20a395b336c4140abe0)) 262 | 263 | * test in different redis versions ([`4cac5d8`](https://github.com/taylorhakes/python-redis-cache/commit/4cac5d8e810e50b3e2e216f1f44dc939cad61ee2)) 264 | 265 | * 1) Make zrange works by adding zadd 266 | 2) always clear the cache on tests ([`69e8815`](https://github.com/taylorhakes/python-redis-cache/commit/69e8815d429e5fffd4e23a97d1f5595b1d415739)) 267 | 268 | * CI test in different python versions (#1) 
([`60e7c71`](https://github.com/taylorhakes/python-redis-cache/commit/60e7c712c417c5e28948c9e335399274911c6ad9)) 269 | 270 | * update readme and define py versions ([`c3ba154`](https://github.com/taylorhakes/python-redis-cache/commit/c3ba154c654395d527ac2d80f7b18097cc19d175)) 271 | 272 | * Create CI.yml ([`bb064c5`](https://github.com/taylorhakes/python-redis-cache/commit/bb064c58fba229f8e5980f454b380d97cd2099a9)) 273 | 274 | * refactoring tests ([`5d6be6c`](https://github.com/taylorhakes/python-redis-cache/commit/5d6be6c3a3716bdd3de3529215fb1cabfcf4d91e)) 275 | 276 | * refactoring tests ([`d2afb81`](https://github.com/taylorhakes/python-redis-cache/commit/d2afb81900de145414c6d48221b0af1314077fd7)) 277 | 278 | * Revert "fixing invalidate_all" 279 | 280 | This reverts commit 3c8194c85c66d46209e09de3657530641503d408. ([`c6c56ce`](https://github.com/taylorhakes/python-redis-cache/commit/c6c56cef06e044381da5bf0314abc796876d3190)) 281 | 282 | * fix cache key ([`f2b35f8`](https://github.com/taylorhakes/python-redis-cache/commit/f2b35f8ff9716fe3e9c2869c0e8291a95107f631)) 283 | 284 | * fixing invalidate_all ([`3c8194c`](https://github.com/taylorhakes/python-redis-cache/commit/3c8194c85c66d46209e09de3657530641503d408)) 285 | 286 | * Fix bug in calling cache twice ([`98c4daf`](https://github.com/taylorhakes/python-redis-cache/commit/98c4daf441a9e17445b571db4e18197ac70237dc)) 287 | 288 | * Fixed mget test to check values ([`4665045`](https://github.com/taylorhakes/python-redis-cache/commit/4665045a7dfb5d1ec63c2b44ae528761dbbd5534)) 289 | 290 | * Add basic multi get ([`2dc865e`](https://github.com/taylorhakes/python-redis-cache/commit/2dc865e7515ed45684cfee021a6dceb62e6bf60e)) 291 | 292 | * Remove hashing to avoid collisions on key values ([`8a89e7d`](https://github.com/taylorhakes/python-redis-cache/commit/8a89e7d750b6dedb34397afe7aee19cde17baaa7)) 293 | 294 | * Fixed bug for deserializer and serializer that returns bytes 
([`4643c9e`](https://github.com/taylorhakes/python-redis-cache/commit/4643c9e0678f0ab80dddbf1f7f81de614e5fd1a3)) 295 | 296 | * Added link to pickle security issues ([`99d623b`](https://github.com/taylorhakes/python-redis-cache/commit/99d623b4ee295fe6bc7b4f9376bb7b276178761a)) 297 | 298 | * Add test for pickle and custom serializer ([`b1564d4`](https://github.com/taylorhakes/python-redis-cache/commit/b1564d44aea8dbdea0a8c1ea9cf6606b76e437b9)) 299 | 300 | * Changed the requirements to be clear the version could be higher ([`669fee9`](https://github.com/taylorhakes/python-redis-cache/commit/669fee974f11893a2d4e7d5b1195a5386adb2921)) 301 | 302 | * Add test.sh script to run docker tests ([`300dd48`](https://github.com/taylorhakes/python-redis-cache/commit/300dd4867c46f4757ca11aa571b14276d947644e)) 303 | 304 | * Update wording on readme ([`496bf6f`](https://github.com/taylorhakes/python-redis-cache/commit/496bf6fd97149e64e297f5dc396d6011d78087e2)) 305 | 306 | * Add explanation of how to call the function ([`f5dfc65`](https://github.com/taylorhakes/python-redis-cache/commit/f5dfc653e8a0b357ef8792e3eff7b14eecad6192)) 307 | 308 | * Fix formatting ([`c49d5b0`](https://github.com/taylorhakes/python-redis-cache/commit/c49d5b0237d3eea0751fa49144aaac86a4ae7f28)) 309 | 310 | * Updated readme to provide more detail on ttl and limit parmeters ([`238e5f0`](https://github.com/taylorhakes/python-redis-cache/commit/238e5f05da7a5a6ffade113469fa819b375bc6b8)) 311 | 312 | * Save values in cache if it doesn't exist ([`7855cdd`](https://github.com/taylorhakes/python-redis-cache/commit/7855cddab55b6b5d45cb01d961643966b86db319)) 313 | 314 | * Update readme formatting ([`8c31654`](https://github.com/taylorhakes/python-redis-cache/commit/8c31654c7ab92c98df0ae1f7601362128d29850f)) 315 | 316 | * Updated readme with invalidate API ([`0521037`](https://github.com/taylorhakes/python-redis-cache/commit/05210377e154243b3d42ff4761482a6a3cd775cc)) 317 | 318 | * Added invalidate and invalidate_all 
([`75fba53`](https://github.com/taylorhakes/python-redis-cache/commit/75fba538fd5a92fd6a34597369790f3439d69fa1)) 319 | 320 | * Add lua updates ([`29693cd`](https://github.com/taylorhakes/python-redis-cache/commit/29693cd949839b56ff25cb3cccd203043e464141)) 321 | 322 | * Add tests against real redis ([`173dc01`](https://github.com/taylorhakes/python-redis-cache/commit/173dc01406da3ed2acc182e60e6ae8c8951323dd)) 323 | 324 | * Updated readme ([`100e3a0`](https://github.com/taylorhakes/python-redis-cache/commit/100e3a0c901a7a74554e740ebafad343c4d5f9da)) 325 | 326 | * Add API docs ([`0bd5786`](https://github.com/taylorhakes/python-redis-cache/commit/0bd578628d2f6f1050888287bc1330d5b7cddc4e)) 327 | 328 | * Update readme ([`933712b`](https://github.com/taylorhakes/python-redis-cache/commit/933712bd3ac6681bc4c4e23625ddf19f328b5502)) 329 | 330 | * Add python code ([`a033d79`](https://github.com/taylorhakes/python-redis-cache/commit/a033d79742a998c7a6ca2228d2c064294e55d4ec)) 331 | 332 | * Initial commit ([`5135a44`](https://github.com/taylorhakes/python-redis-cache/commit/5135a44e9e0dad996be3113dbbc09ac43d2206c0)) 333 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Taylor Hakes 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![CI](https://github.com/taylorhakes/python-redis-cache/actions/workflows/tests.yml/badge.svg?branch=master)](https://github.com/taylorhakes/python-redis-cache/actions/workflows/tests.yml/badge.svg?branch=master) 2 | [![pypi](https://img.shields.io/pypi/v/python-redis-cache.svg)](https://pypi.python.org/pypi/python-redis-cache) 3 | [![license](https://img.shields.io/github/license/taylorhakes/python-redis-cache.svg)](https://github.com/taylorhakes/python-redis-cache/blob/master/LICENSE) 4 | 5 | # python-redis-cache 6 | Simple redis cache for Python functions 7 | 8 | ### Requirements 9 | - Redis 5+ 10 | - Python 3.8+ (should work in Python 3.6+, but not tested) 11 | 12 | ## How to install 13 | ``` 14 | pip install python-redis-cache 15 | ``` 16 | 17 | ## How to use 18 | ```python 19 | from redis import StrictRedis 20 | from redis_cache import RedisCache 21 | 22 | client = StrictRedis(host="redis", decode_responses=True) 23 | cache = RedisCache(redis_client=client) 24 | 25 | 26 | @cache.cache() 27 | def my_func(arg1, arg2): 28 | return some_expensive_operation() 29 | 30 | # Use the function 31 | my_func(1, 2) 32 | 33 | # Call it again with the same arguments and it will use cache 34 | my_func(1, 2) 35 | 36 | # Invalidate a single value 37 | my_func.invalidate(1, 2) 38 | 39 | # Invalidate all 
values for function 40 | my_func.invalidate_all() 41 | ``` 42 | 43 | ## API 44 | ```python 45 | # Create the redis cache 46 | cache = RedisCache(redis_client, prefix="rc", serializer=dumps, deserializer=loads, key_serializer=None, support_cluster=True, exception_handler=None) 47 | 48 | # Cache decorator to go on functions, see above 49 | cache.cache(ttl=..., limit=..., namespace=...) -> Callable[[Callable], Callable] 50 | 51 | # Get multiple values from the cache 52 | cache.mget([{"fn": my_func, "args": [1,2], "kwargs": {}}, ...]) -> List[Any] 53 | 54 | 55 | 56 | # Cached function API 57 | 58 | # Returns a cached value, if it exists in cache. Saves value in cache if it doesn't exist 59 | cached_func(*args, **kwargs) 60 | 61 | # Invalidates a single value 62 | cached_func.invalidate(*args, **kwargs) 63 | 64 | # Invalidates all values for cached function 65 | cached_func.invalidate_all() 66 | ``` 67 | 68 | - prefix - The string to prefix the redis keys with 69 | - serializer/deserializer - functions to convert arguments and return value to a string (uses JSON by default) 70 | - ttl - The time in seconds to cache the return value 71 | - namespace - The string namespace of the cache. This is useful for allowing multiple functions to use the same cache. By default it's `f'{function.__module__}.{function.__qualname__}'` 72 | - exception_handler - Function to handle Redis cache exceptions. This allows you to fall back to calling the original function or logging exceptions. Function has the following signature `exception_handler(exception: Exception, function: Callable, args: List, kwargs: Dict) -> Any`. If using this handler, reraise the exception in the handler to stop execution of the function. All return results will be used even if `None`. If handler not defined, it will raise the exception and not call the original function. 73 | - support_cluster - Set to False to disable the `{` prefix on the keys. This is NOT recommended. See below for more info. 
74 | - active - Optional flag to disable the caching completely for troubleshooting/lower environments 75 | 76 | 77 | ## Limitations and things to know 78 | Arguments and return types must be JSON serializable by default. You can override the serializer, but be careful with using Pickle. Make sure you understand the security risks. Pickle should not be used with untrusted values. 79 | https://security.stackexchange.com/questions/183966/safely-load-a-pickle-file 80 | `decode_responses` parameter must be `False` in redis client if you use pickle. 81 | 82 | - **ttl** - is based on the time from when it's first inserted in the cache, not based on the last access 83 | - **limit** - The limit will evict keys (once it hits the limit) based on FIFO, not based on LRU 84 | 85 | ### Redis key names 86 | The key names by default are as follows: 87 | ```python 88 | from base64 import b64encode 89 | 90 | key = f"{{rc:{fn.__module__}.{fn.__qualname__}}}:{b64encode(function_args).decode('utf-8')}" 91 | ``` 92 | The cache key names start with `{`, which can be confusing, but is required for redis clusters to place the keys 93 | in the correct slots. 94 | 95 | **NOTE**: It is NOT recommended to use any of the options below. The key name generation by default handles all use cases. 
96 | 97 | #### Specifying `prefix` - The string to prefix the redis keys with 98 | ```python 99 | cache = RedisCache(redis_client, prefix="custom_prefix") 100 | 101 | # Changes keys to the following 102 | key = f"{{custom_prefix:{fn.__module__}.{fn.__qualname__}}}:{b64encode(function_args).decode('utf-8')}" 103 | ``` 104 | #### Specifying `namespace` - The name of the cache function 105 | ```python 106 | cache = RedisCache(redis_client) 107 | 108 | @cache.cache(namespace="custom_func_name") 109 | def my_func(arg1, arg2): 110 | pass 111 | 112 | # Changes keys to the following 113 | key = f"{{rc:custom_func_name}}:{b64encode(function_args).decode('utf-8')}" 114 | ``` 115 | #### Specifying `key_serializer` or `serializer` - The way function arguments are serialized 116 | ```python 117 | def custom_key_serializer(fn_args): 118 | ## Do something with fn_args and return a string. For instance 119 | return my_custom_serializer(fn_args) 120 | 121 | cache = RedisCache(redis_client, key_serializer=custom_key_serializer) 122 | 123 | # Changes keys to the following 124 | key = f"{{rc:{fn.__module__}.{fn.__qualname__}}}:{b64encode(custom_serialized_args).decode('utf-8')}" 125 | ``` 126 | 127 | #### Specifying `support_cluster=False` - This will disable the `{` prefix on the keys 128 | This option is NOT recommended because this library will no longer work with redis clusters. Oftentimes people/companies 129 | will start out not using cluster mode and then later migrate to using a cluster. This option will make that migration require 130 | a lot of work. If you know for sure you will never use a redis cluster, then you can enable this option. 131 | If you are unsure, don't use this option. There is not any benefit. 
132 | ```python 133 | cache = RedisCache(redis_client, support_cluster=False) 134 | 135 | # Changes keys to the following 136 | key = f"rc:{fn.__module__}.{fn.__qualname__}:{b64encode(custom_serialized_args).decode('utf-8')}" 137 | ``` 138 | 139 | ### Instance/Class methods 140 | Caching instance/class methods may require a little refactoring. This is because the `self`/`cls` cannot be 141 | serialized to JSON without custom serializers. The best way to handle caching class methods is to make a 142 | more specific static method to cache (or global function). For instance: 143 | 144 | Don't do this: 145 | ```python 146 | class MyClass: 147 | @cache.cache() 148 | def my_func(self, arg1, arg2): 149 | return self.some_arg + arg1 + arg2 150 | ``` 151 | 152 | Do this instead: 153 | ```python 154 | class MyClass: 155 | def my_func(self, arg1, arg2): 156 | return self.my_cached_method(self.some_arg, arg1, arg2) 157 | 158 | @staticmethod 159 | @cache.cache() 160 | def my_cached_method(some_arg, arg1, arg2): 161 | return some_arg + arg1 + arg2 162 | ``` 163 | 164 | If you aren't using `self`/`cls` in the method, you can use the `@staticmethod` decorator to make it a static method. 165 | If you must use `self`/`cls` in your cached method and can't use the options suggested above, you will need to create 166 | a custom JSON key serializer for the `self`/`cls` object or you can use the Pickle serializer (which isn't recommended). 
167 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/taylorhakes/python-redis-cache/e17b52b22e51de117f7b71f8fa8a578adb73e69f/conftest.py -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | test: 4 | image: python:${PY_VERSION:-3.9.16} 5 | volumes: 6 | - .:/python 7 | command: ${TEST_COMMAND:-python setup.py test} 8 | stdin_open: true 9 | tty: true 10 | working_dir: /python 11 | depends_on: 12 | - redis-test-host 13 | 14 | redis-test-host: 15 | image: redis:${REDIS_VERSION:-5} 16 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools>=42", 4 | "wheel" 5 | ] 6 | build-backend = "setuptools.build_meta" 7 | 8 | [tool.semantic_release] 9 | version_variables = ["setup.py:__version__"] 10 | branch = "master" 11 | -------------------------------------------------------------------------------- /redis_cache/__init__.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | from json import dumps, loads 3 | from base64 import b64encode 4 | from inspect import signature, Parameter 5 | 6 | def compact_dump(value): 7 | return dumps(value, separators=(',', ':'), sort_keys=True) 8 | 9 | def get_args(fn, args, kwargs): 10 | """ 11 | This function parses the args and kwargs in the context of a function and creates unified 12 | dictionary of {: }. This is useful 13 | because arguments can be passed as args or kwargs, and we want to make sure we cache 14 | them both the same. 
Otherwise there would be different caching for add(1, 2) and add(arg1=1, arg2=2) 15 | """ 16 | arg_sig = signature(fn) 17 | standard_args = [param.name for param in arg_sig.parameters.values() if param.kind is param.POSITIONAL_OR_KEYWORD or param.kind is param.POSITIONAL_ONLY] 18 | allowed_kwargs = {param.name for param in arg_sig.parameters.values() if param.kind is param.POSITIONAL_OR_KEYWORD or param.kind is param.KEYWORD_ONLY} 19 | variable_args = [param.name for param in arg_sig.parameters.values() if param.kind is param.VAR_POSITIONAL] 20 | variable_kwargs = [param.name for param in arg_sig.parameters.values() if param.kind is param.VAR_KEYWORD] 21 | parsed_args = {} 22 | 23 | if standard_args or variable_args: 24 | for index, arg in enumerate(args): 25 | try: 26 | parsed_args[standard_args[index]] = arg 27 | except IndexError: 28 | # then fallback to using the positional varargs name 29 | if variable_args: 30 | vargs_name = variable_args[0] 31 | if vargs_name not in parsed_args: 32 | parsed_args[vargs_name] = [] 33 | 34 | parsed_args[vargs_name].append(arg) 35 | 36 | 37 | if kwargs: 38 | for key, value in kwargs.items(): 39 | if key in allowed_kwargs: 40 | parsed_args[key] = value 41 | elif variable_kwargs: 42 | vkwargs_name = variable_kwargs[0] 43 | if vkwargs_name not in parsed_args: 44 | parsed_args[vkwargs_name] = {} 45 | parsed_args[vkwargs_name][key] = value 46 | 47 | for param in arg_sig.parameters.values(): 48 | if param.name not in parsed_args and param.default is not Parameter.empty: 49 | parsed_args[param.name] = param.default 50 | 51 | return parsed_args 52 | 53 | 54 | def get_cache_lua_fn(client): 55 | if not hasattr(client, '_lua_cache_fn'): 56 | client._lua_cache_fn = client.register_script(""" 57 | local ttl = tonumber(ARGV[2]) 58 | local value 59 | if ttl > 0 then 60 | value = redis.call('SETEX', KEYS[1], ttl, ARGV[1]) 61 | else 62 | value = redis.call('SET', KEYS[1], ARGV[1]) 63 | end 64 | local limit = tonumber(ARGV[3]) 65 | if limit > 0 
then 66 | local time_parts = redis.call('TIME') 67 | -- TIME returns [seconds, microseconds] (as strings), so parse each 68 | -- and add together to get the full timestamp 69 | local time = tonumber(time_parts[1]) + (tonumber(time_parts[2]) / 1000000) 70 | redis.call('ZADD', KEYS[2], time, KEYS[1]) 71 | local count = tonumber(redis.call('ZCOUNT', KEYS[2], '-inf', '+inf')) 72 | local over = count - limit 73 | if over > 0 then 74 | local stale_keys_and_scores = redis.call('ZPOPMIN', KEYS[2], over) 75 | -- Remove the the scores and just leave the keys 76 | local stale_keys = {} 77 | for i = 1, #stale_keys_and_scores, 2 do 78 | stale_keys[#stale_keys+1] = stale_keys_and_scores[i] 79 | end 80 | redis.call('ZREM', KEYS[2], unpack(stale_keys)) 81 | redis.call('DEL', unpack(stale_keys)) 82 | end 83 | end 84 | return value 85 | """) 86 | return client._lua_cache_fn 87 | 88 | 89 | # Utility function to batch keys 90 | def chunks(iterable, n): 91 | """Yield successive n-sized chunks from iterator.""" 92 | _iterable = iter(iterable) 93 | while True: 94 | elements = [] 95 | for _ in range(n): 96 | try: 97 | elements.append(next(_iterable)) 98 | except StopIteration: 99 | break 100 | 101 | if not len(elements): 102 | break 103 | 104 | yield elements 105 | 106 | 107 | class RedisCache: 108 | def __init__(self, redis_client, prefix="rc", serializer=compact_dump, deserializer=loads, key_serializer=None, support_cluster=True, exception_handler=None, active:bool=True): 109 | self.client = redis_client 110 | self.prefix = prefix 111 | self.serializer = serializer 112 | self.deserializer = deserializer 113 | self.key_serializer = key_serializer 114 | self.exception_handler = exception_handler 115 | self.support_cluster = support_cluster 116 | self.active = active 117 | 118 | def cache(self, ttl=0, limit=0, namespace=None, exception_handler=None): 119 | return CacheDecorator( 120 | redis_client=self.client, 121 | prefix=self.prefix, 122 | serializer=self.serializer, 123 | 
deserializer=self.deserializer, 124 | key_serializer=self.key_serializer, 125 | ttl=ttl, 126 | limit=limit, 127 | namespace=namespace, 128 | support_cluster=self.support_cluster, 129 | exception_handler=exception_handler or self.exception_handler, 130 | active=self.active 131 | ) 132 | 133 | def mget(self, *fns_with_args): 134 | keys = [] 135 | for fn_and_args in fns_with_args: 136 | fn = fn_and_args['fn'] 137 | args = fn_and_args['args'] if 'args' in fn_and_args else [] 138 | kwargs = fn_and_args['kwargs'] if 'kwargs' in fn_and_args else {} 139 | keys.append(fn.instance.get_key(args=args, kwargs=kwargs)) 140 | 141 | results = self.client.mget(*keys) 142 | pipeline = self.client.pipeline() 143 | 144 | deserialized_results = [] 145 | needs_pipeline = False 146 | for i, result in enumerate(results): 147 | if result is None: 148 | needs_pipeline = True 149 | 150 | fn_and_args = fns_with_args[i] 151 | fn = fn_and_args['fn'] 152 | args = fn_and_args['args'] if 'args' in fn_and_args else [] 153 | kwargs = fn_and_args['kwargs'] if 'kwargs' in fn_and_args else {} 154 | result = fn.instance.original_fn(*args, **kwargs) 155 | result_serialized = self.serializer(result) 156 | get_cache_lua_fn(self.client)(keys=[keys[i], fn.instance.keys_key], args=[result_serialized, fn.instance.ttl, fn.instance.limit], client=pipeline) 157 | else: 158 | result = self.deserializer(result) 159 | deserialized_results.append(result) 160 | 161 | if needs_pipeline: 162 | pipeline.execute() 163 | return deserialized_results 164 | 165 | class CacheDecorator: 166 | def __init__(self, redis_client, prefix="rc", serializer=compact_dump, deserializer=loads, key_serializer=None, ttl=0, limit=0, namespace=None, support_cluster=True, exception_handler=None, active:bool=True): 167 | self.client = redis_client 168 | self.prefix = prefix 169 | self.serializer = serializer 170 | self.key_serializer = key_serializer 171 | self.deserializer = deserializer 172 | self.ttl = ttl 173 | self.limit = limit 174 | 
self.namespace = namespace 175 | self.exception_handler = exception_handler 176 | self.support_cluster = support_cluster 177 | self.keys_key = None 178 | self.original_fn = None 179 | self.active=active 180 | 181 | 182 | def get_full_prefix(self): 183 | if self.support_cluster: 184 | # Redis cluster requires keys operated in batch to be in the same key space. Redis cluster hashes the keys to 185 | # determine the key space. The braces specify which part of the key to hash (instead of the whole key). 186 | # See https://github.com/taylorhakes/python-redis-cache/issues/29 The `{prefix}:keys` and `{prefix}:args` 187 | # need to be in the same key space. 188 | return f'{{{self.prefix}:{self.namespace}}}' 189 | else: 190 | return f'{self.prefix}:{self.namespace}' 191 | 192 | def get_key(self, args, kwargs): 193 | normalized_args = get_args(self.original_fn, args, kwargs) 194 | 195 | if self.key_serializer: 196 | serialized_data = self.key_serializer(normalized_args) 197 | else: 198 | serialized_data = self.serializer(normalized_args) 199 | 200 | if isinstance(serialized_data, str): 201 | serialized_data = serialized_data.encode('utf-8') 202 | 203 | # Encode the value as base64 to avoid issues with {} and other special characters 204 | serialized_encoded_data = b64encode(serialized_data).decode('utf-8') 205 | 206 | return f'{self.get_full_prefix()}:{serialized_encoded_data}' 207 | 208 | def __call__(self, fn): 209 | self.namespace = self.namespace or f'{fn.__module__}.{fn.__qualname__}' 210 | self.keys_key = f'{self.get_full_prefix()}:keys' 211 | self.original_fn = fn 212 | 213 | @wraps(fn) 214 | def inner(*args, **kwargs): 215 | nonlocal self 216 | # Return the original function if we're not in active mode 217 | if not self.active: 218 | return fn(*args, **kwargs) 219 | key = self.get_key(args, kwargs) 220 | result = None 221 | 222 | exception_handled = False 223 | try: 224 | result = self.client.get(key) 225 | except Exception as e: 226 | if self.exception_handler: 227 
| # This allows people to handle failures in cache lookups 228 | exception_handled = True 229 | parsed_result = self.exception_handler(e, self.original_fn, args, kwargs) 230 | if result: 231 | parsed_result = self.deserializer(result) 232 | elif not exception_handled: 233 | parsed_result = fn(*args, **kwargs) 234 | result_serialized = self.serializer(parsed_result) 235 | get_cache_lua_fn(self.client)(keys=[key, self.keys_key], args=[result_serialized, self.ttl, self.limit]) 236 | 237 | return parsed_result 238 | 239 | inner.invalidate = self.invalidate 240 | inner.invalidate_all = self.invalidate_all 241 | inner.get_full_prefix = self.get_full_prefix 242 | inner.instance = self 243 | return inner 244 | 245 | def invalidate(self, *args, **kwargs): 246 | key = self.get_key(args, kwargs) 247 | pipe = self.client.pipeline() 248 | pipe.delete(key) 249 | pipe.zrem(self.keys_key, key) 250 | pipe.execute() 251 | 252 | def invalidate_all(self, *args, **kwargs): 253 | chunks_gen = chunks(self.client.scan_iter(f'{self.get_full_prefix()}:*'), 500) 254 | for keys in chunks_gen: 255 | self.client.delete(*keys) 256 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [aliases] 2 | test=pytest 3 | 4 | [tool:pytest] 5 | addopts = -s -v 6 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | # read the contents of your README file 4 | from os import path 5 | this_directory = path.abspath(path.dirname(__file__)) 6 | with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: 7 | long_description = f.read() 8 | 9 | __version__ = "4.0.2" 10 | 11 | setup( 12 | name='python-redis-cache', 13 | version=__version__, 14 | description='Basic Redis caching for functions', 15 | 
long_description=long_description, 16 | long_description_content_type='text/markdown', 17 | url='http://github.com/taylorhakes/python-redis-cache', 18 | author='Taylor Hakes', 19 | license='MIT', 20 | python_requires='>=3.8', 21 | packages=find_packages(), 22 | install_requires=['redis'], 23 | setup_requires=['pytest-runner==5.3.1'], 24 | tests_require=['pytest==6.2.5', 'redis==4.4.4'], 25 | ) 26 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | docker-compose run --rm test 2 | -------------------------------------------------------------------------------- /tests/test_redis_cache.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | import time 3 | 4 | from redis import StrictRedis 5 | from redis_cache import RedisCache, get_args 6 | from base64 import b64encode 7 | 8 | import pickle 9 | import pytest 10 | import zlib 11 | 12 | 13 | redis_host = "redis-test-host" 14 | client = StrictRedis(host=redis_host, decode_responses=True) 15 | client_no_decode = StrictRedis(host=redis_host) 16 | 17 | 18 | @pytest.fixture(scope="session", autouse=True) 19 | def clear_cache(request): 20 | client.flushall() 21 | 22 | 23 | @pytest.fixture() 24 | def cache(): 25 | return RedisCache(redis_client=client) 26 | 27 | 28 | @pytest.fixture() 29 | def inactive_cache(): 30 | return RedisCache(redis_client=client, active=False) 31 | 32 | def add_func(n1, n2): 33 | """ Add function 34 | Add n1 to n2 and return a uuid4 unique verifier 35 | 36 | Returns: 37 | tuple(int, str(uuid.uuid4)) 38 | """ 39 | return n1 + n2, str(uuid.uuid4()) 40 | 41 | 42 | def test_basic_check(cache): 43 | @cache.cache() 44 | def add_basic(arg1, arg2): 45 | return add_func(arg1, arg2) 46 | 47 | r_3_4, v_3_4 = add_basic(3, 4) 48 | r_3_4_cached, v_3_4_cached = add_basic(3, 4) 49 | # Make sure the same cache is used for kwargs 50 | 
r_3_4_cached_kwargs, v_3_4_cached_kwargs = add_basic(arg1=3, arg2=4) 51 | r_3_4_cached_mix, v_3_4_cached_mix = add_basic(3, arg2=4) 52 | r_5_5, v_5_5 = add_basic(5, 5) 53 | 54 | assert 7 == r_3_4 == r_3_4_cached == r_3_4_cached_kwargs == r_3_4_cached_mix \ 55 | and v_3_4 == v_3_4_cached == v_3_4_cached_kwargs == v_3_4_cached_mix 56 | assert 10 == r_5_5 and v_5_5 != r_3_4 57 | 58 | 59 | def test_ttl(cache): 60 | @cache.cache(ttl=1) 61 | def add_ttl(arg1, arg2): 62 | return add_func(arg1, arg2) 63 | 64 | r_1, v_1 = add_ttl(3, 4) 65 | r_2, v_2 = add_ttl(3, 4) 66 | time.sleep(2) 67 | 68 | r_3, v_3 = add_ttl(3, 4) 69 | 70 | assert 7 == r_1 == r_2 == r_3 71 | assert v_1 == v_2 != v_3 72 | 73 | def test_inactive_cache(inactive_cache): 74 | @inactive_cache.cache(ttl=2) 75 | def add_ttl(arg1, arg2): 76 | return add_func(arg1, arg2) 77 | 78 | r_1, v_1 = add_ttl(3, 4) 79 | r_2, v_2 = add_ttl(3, 4) 80 | 81 | assert 7 == r_1 == r_2 82 | # In inactive scenario the calls should return different uuids 83 | assert v_1 != v_2 84 | 85 | def test_limit(cache): 86 | @cache.cache(limit=2) 87 | def add_limit(arg1, arg2): 88 | return add_func(arg1, arg2) 89 | 90 | r_3_4, v_3_4 = add_limit(3, 4) 91 | # cache_queue [add_limit(3, 4)] 92 | 93 | r_5_5, v_5_5 = add_limit(5, 5) 94 | # cache_queue [add_limit(3, 4), add_limit(5, 5)] 95 | 96 | r_6_5, v_6_5 = add_limit(6, 5) # limit hitted rotating 97 | # cache_queue [add_limit(5, 5), add_limit(6, 5)] 98 | 99 | r2_3_4, v2_3_4 = add_limit(3, 4) # new cache generated 100 | # cache_queue [add_limit(6, 5), add_limit(3, 4)] 101 | 102 | # cache was rotated the first call needed to be re-executed/re-cached 103 | assert r_3_4 == r2_3_4 and v_3_4 != v2_3_4 104 | 105 | r2_6_5, v2_6_5 = add_limit(6, 5) # still cached 106 | # cache_queue [add_limit(6, 5), add_limit(3, 4)] 107 | assert r_6_5 == r2_6_5 and v_6_5 == v2_6_5 108 | 109 | r3_3_4, v3_3_4 = add_limit(3, 4) # still cached 110 | # cache_queue [add_limit(6, 5), add_limit(3, 4)] 111 | assert r2_3_4 == 
r3_3_4 and v2_3_4 == v3_3_4 112 | 113 | 114 | def test_invalidate_not_in_cache(cache): 115 | @cache.cache() 116 | def add_invalidate_not_in_cache(arg1, arg2): 117 | return add_func(arg1, arg2) 118 | 119 | r_3_4, v_3_4 = add_invalidate_not_in_cache(3, 4) 120 | r_4_4, v_4_4 = add_invalidate_not_in_cache(4, 4) 121 | 122 | # calling invalidate with params that was never 123 | # passed should not change the cache status 124 | add_invalidate_not_in_cache.invalidate(5, 5) 125 | 126 | r2_3_4, v2_3_4 = add_invalidate_not_in_cache(3, 4) 127 | r2_4_4, v2_4_4 = add_invalidate_not_in_cache(4, 4) 128 | 129 | assert r_3_4 == r2_3_4 and v_3_4 == v2_3_4 130 | assert r_4_4 == r2_4_4 and v_4_4 == v2_4_4 131 | 132 | 133 | def test_invalidate_in_cache(cache): 134 | @cache.cache() 135 | def add_invalidate_in_cache(arg1, arg2): 136 | return add_func(arg1, arg2) 137 | 138 | r_3_4, v_3_4 = add_invalidate_in_cache(3, 4) 139 | r_4_4, v_4_4 = add_invalidate_in_cache(4, 4) 140 | 141 | # we are invalidating 4, 4 so it should be re-executed next time 142 | add_invalidate_in_cache.invalidate(4, 4) 143 | 144 | r2_3_4, v2_3_4 = add_invalidate_in_cache(3, 4) 145 | r2_4_4, v2_4_4 = add_invalidate_in_cache(4, 4) 146 | 147 | assert r_3_4 == r2_3_4 and v_3_4 == v2_3_4 148 | # 4, 4 was invalidated a new verifier should be generated 149 | assert r_4_4 == r2_4_4 and v_4_4 != v2_4_4 150 | 151 | 152 | def test_invalidate_all(): 153 | cache = RedisCache(redis_client=client) 154 | 155 | @cache.cache() 156 | def f1_invalidate_all(arg1, arg2): 157 | return add_func(arg1, arg2) 158 | 159 | @cache.cache() 160 | def f2222_invalidate_all(arg1, arg2): 161 | return add_func(arg1, arg2) 162 | 163 | r_3_4, v_3_4 = f1_invalidate_all(3, 4) 164 | r_4_4, v_4_4 = f1_invalidate_all(4, 4) 165 | r_5_5, v_5_5 = f2222_invalidate_all(5, 5) 166 | 167 | # invalidating all caches to the function f1_invalidate_all 168 | f1_invalidate_all.invalidate_all() 169 | 170 | r2_3_4, v2_3_4 = f1_invalidate_all(3, 4) 171 | r2_4_4, v2_4_4 = 
f1_invalidate_all(4, 4) 172 | r2_5_5, v2_5_5 = f2222_invalidate_all(5, 5) 173 | 174 | # all caches related to f1_invalidate_all were invalidated 175 | assert r_3_4 == r2_3_4 and v_3_4 != v2_3_4 176 | assert r_4_4 == r2_4_4 and v_4_4 != v2_4_4 177 | 178 | # caches of f2222_invalidate_all should stay stored 179 | assert r_5_5 == r2_5_5 and v_5_5 == v2_5_5 180 | 181 | 182 | class Result: 183 | def __init__(self, arg1, arg2): 184 | self.sum = arg1 + arg2 185 | self.verifier = str(uuid.uuid4()) 186 | 187 | 188 | class Arg: 189 | def __init__(self, value): 190 | self.value = value 191 | 192 | 193 | def test_custom_serializer(): 194 | cache = RedisCache( 195 | redis_client=client_no_decode, 196 | serializer=pickle.dumps, 197 | deserializer=pickle.loads, 198 | ) 199 | 200 | @cache.cache() 201 | def add_custom_serializer(arg1, arg2): 202 | return Result(arg1.value, arg2.value) 203 | 204 | r1 = add_custom_serializer(Arg(2), Arg(3)) 205 | r2 = add_custom_serializer(Arg(2), Arg(3)) 206 | 207 | assert r1.sum == r2.sum and r1.verifier == r2.verifier 208 | 209 | 210 | def test_custom_serializer_with_compress(): 211 | def dumps(value): 212 | return zlib.compress(pickle.dumps(value)) 213 | 214 | def loads(value): 215 | return pickle.loads(zlib.decompress(value)) 216 | 217 | cache = RedisCache( 218 | redis_client=client_no_decode, serializer=dumps, deserializer=loads, 219 | ) 220 | 221 | @cache.cache() 222 | def add_compress_serializer(arg1, arg2): 223 | return Result(arg1.value, arg2.value) 224 | 225 | r1 = add_compress_serializer(Arg(2), Arg(3)) 226 | r2 = add_compress_serializer(Arg(2), Arg(3)) 227 | 228 | assert r1.sum == r2.sum and r1.verifier == r2.verifier 229 | 230 | def test_custom_key_serializer(): 231 | def key_serializer(args): 232 | return f'{args}' 233 | 234 | cache = RedisCache( 235 | redis_client=client_no_decode, 236 | serializer=pickle.dumps, 237 | deserializer=pickle.loads, 238 | key_serializer=key_serializer 239 | ) 240 | 241 | @cache.cache() 242 | def 
add_custom_key_serializer(arg1, arg2): 243 | return arg1 + arg2 244 | 245 | r1 = add_custom_key_serializer(2, 3) 246 | r2 = add_custom_key_serializer(2, 3) 247 | 248 | encoded_args = b64encode("{'arg1': 2, 'arg2': 3}".encode('utf-8')).decode('utf-8') 249 | 250 | assert r1 == r2 251 | assert client.exists(f'{{rc:test_redis_cache.test_custom_key_serializer..add_custom_key_serializer}}:{encoded_args}') 252 | 253 | def test_support_cluster_false(): 254 | 255 | cache = RedisCache( 256 | redis_client=client, 257 | support_cluster=False 258 | ) 259 | 260 | @cache.cache() 261 | def add_custom_key_serializer(arg1, arg2): 262 | return arg1 + arg2 263 | 264 | r1 = add_custom_key_serializer(2, 3) 265 | r2 = add_custom_key_serializer(2, 3) 266 | 267 | encoded_args = b64encode('{"arg1":2,"arg2":3}'.encode('utf-8')).decode('utf-8') 268 | 269 | assert r1 == r2 270 | assert client.exists(f'rc:test_redis_cache.test_support_cluster_false..add_custom_key_serializer:{encoded_args}') 271 | 272 | 273 | def test_basic_mget(cache): 274 | @cache.cache() 275 | def add_basic_get(arg1, arg2): 276 | return add_func(arg1, arg2) 277 | 278 | r_3_4, v_3_4 = cache.mget({"fn": add_basic_get, "args": (3, 4)})[0] 279 | r2_3_4, v2_3_4 = add_basic_get(3, 4) 280 | 281 | assert r_3_4 == r2_3_4 and v_3_4 == v2_3_4 282 | 283 | 284 | def test_same_name_method(cache): 285 | class A: 286 | @staticmethod 287 | @cache.cache() 288 | def static_method(): 289 | return 'A' 290 | 291 | class B: 292 | @staticmethod 293 | @cache.cache() 294 | def static_method(): 295 | return 'B' 296 | 297 | A.static_method() # Store the value in the cache 298 | B.static_method() 299 | 300 | key_a = A.static_method.instance.get_key([], {}) 301 | key_b = B.static_method.instance.get_key([], {}) 302 | 303 | # 1. Check that both keys exists 304 | assert client.exists(key_a) 305 | assert client.exists(key_b) 306 | 307 | # 2. They are different 308 | assert key_a != key_b 309 | 310 | # 3. 
And stored values are different 311 | assert A.static_method() != B.static_method() 312 | 313 | 314 | def test_same_name_inner_function(cache): 315 | def a(): 316 | @cache.cache() 317 | def inner_function(): 318 | return 'A' 319 | 320 | return inner_function 321 | 322 | def b(): 323 | @cache.cache() 324 | def inner_function(): 325 | return 'B' 326 | 327 | return inner_function 328 | 329 | first_func = a() 330 | second_func = b() 331 | 332 | first_func() # Store the value in the cache 333 | second_func() 334 | 335 | first_key = first_func.instance.get_key([], {}) 336 | second_key = second_func.instance.get_key([], {}) 337 | 338 | # 1. Check that both keys exists 339 | assert client.exists(first_key) 340 | assert client.exists(second_key) 341 | 342 | # 2. They are different 343 | assert first_key != second_key 344 | 345 | # 3. And stored values are different 346 | assert first_func() != second_func() 347 | 348 | 349 | def test_get_args(cache): 350 | def fn1(a, b): 351 | pass 352 | 353 | def fn2(a, b, *c): 354 | pass 355 | 356 | def fn3(*c): 357 | pass 358 | 359 | def fn4(a, *c, d, **e): 360 | pass 361 | 362 | def fn5(*, d, **e): 363 | pass 364 | 365 | def fn6(a, b, /, c, d): 366 | pass 367 | 368 | def fn7(a, b, c=3, *, d): 369 | pass 370 | 371 | assert get_args(fn1, (1,2), {}) == dict(a=1, b=2) 372 | assert get_args(fn1, [], dict(a=1, b=2)) == dict(a=1, b=2) 373 | assert get_args(fn1, [1], dict(b=2)) == dict(a=1, b=2) 374 | assert get_args(fn2, [1,2,3,4], {}) == dict(a=1, b=2, c=[3,4]) 375 | assert get_args(fn3, [1, 2, 3, 4], {}) == dict(c=[1, 2, 3, 4]) 376 | assert get_args(fn4, [1, 2, 3, 4], dict(d=5, f=6, g=7, h=8)) == dict(a=1, c=[2, 3, 4], d=5, e=dict(f=6, g=7, h=8)) 377 | assert get_args(fn5, [], dict(d=5, f=6, g=7, h=8)) == dict(d=5, e=dict(f=6, g=7, h=8)) 378 | assert get_args(fn6, [1, 2, 3], dict(d=4)) == dict(a=1, b=2, c=3, d=4) 379 | assert get_args(fn7, [1, 2], dict(d=4)) == dict(a=1, b=2, c=3, d=4) 380 | 381 | # Simulate the environment where redis is 
not available 382 | # Only test the CacheDecorator since the exception handling should be done inside the decorator 383 | # The exceptions of other methods, e.g. invalidate and invalidate_all, can be easily handled by using try-except outside 384 | # The uuid4 verifier is not tested under this environment 385 | 386 | def custom_exception_handler(exception, fn, args, kwargs): 387 | return fn(*args, **kwargs) 388 | 389 | @pytest.fixture() 390 | def no_redis_cache(): 391 | return RedisCache(redis_client=None, exception_handler=custom_exception_handler) 392 | 393 | 394 | def add_func_no_redis(n1, n2): 395 | """ Add function 396 | Add n1 to n2 397 | 398 | Returns: 399 | int 400 | """ 401 | return n1 + n2 402 | 403 | 404 | def test_basic_check_no_redis(no_redis_cache): 405 | @no_redis_cache.cache() 406 | def add_basic(arg1, arg2): 407 | return add_func_no_redis(arg1, arg2) 408 | 409 | r_3_4 = add_basic(3, 4) 410 | r_3_4_cached = add_basic(3, 4) 411 | # Make sure the same cache is used for kwargs 412 | r_3_4_cached_kwargs = add_basic(arg1=3, arg2=4) 413 | r_3_4_cached_mix = add_basic(3, arg2=4) 414 | r_5_5 = add_basic(5, 5) 415 | 416 | assert 7 == r_3_4 == r_3_4_cached == r_3_4_cached_kwargs == r_3_4_cached_mix 417 | assert 10 == r_5_5 != r_3_4 418 | 419 | 420 | --------------------------------------------------------------------------------