├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.md │ ├── config.yml │ └── feature-request.md ├── dependabot.yml ├── pull_request_template.md └── workflows │ ├── lock.yaml │ └── tests.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGES.rst ├── CONTRIBUTING.rst ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docs ├── Makefile ├── _static │ └── flask-cache.png ├── _templates │ └── sidebarintro.html ├── api.rst ├── changelog.rst ├── conf.py ├── index.rst ├── license.rst └── make.bat ├── examples ├── hello.cfg └── hello.py ├── requirements ├── dev.in ├── dev.txt ├── docs.in ├── docs.txt ├── tests.in ├── tests.txt ├── typing.in └── typing.txt ├── setup.cfg ├── setup.py ├── src └── flask_caching │ ├── __init__.py │ ├── backends │ ├── __init__.py │ ├── base.py │ ├── filesystemcache.py │ ├── memcache.py │ ├── nullcache.py │ ├── rediscache.py │ ├── simplecache.py │ └── uwsgicache.py │ ├── contrib │ ├── __init__.py │ ├── googlecloudstoragecache.py │ └── uwsgicache.py │ ├── jinja2ext.py │ ├── py.typed │ └── utils.py ├── tests ├── conftest.py ├── test_backend_cache.py ├── test_basic_app.py ├── test_cache.py ├── test_init.py ├── test_memoize.py ├── test_template.html ├── test_templates.py └── test_view.py └── tox.ini /.github/ISSUE_TEMPLATE/bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Report a bug in Flask-Caching (not other projects which depend on Flask-Caching) 4 | --- 5 | 6 | 12 | 13 | 19 | 20 | 23 | 24 | Environment: 25 | 26 | - Python version: 27 | - Flask-Caching version: 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Questions 4 | url: https://stackoverflow.com/search?tab=relevance&q=Flask-Caching 5 | about: Search for and ask questions 
about your code on Stack Overflow. 6 | - name: Questions and discussions 7 | url: https://discord.gg/pallets 8 | about: Discuss questions about your code on our Discord chat. 9 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest a new feature for Flask-Caching 4 | --- 5 | 6 | 10 | 11 | 16 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/requirements" 5 | schedule: 6 | interval: monthly 7 | time: "08:00" 8 | open-pull-requests-limit: 99 9 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 6 | 7 | 10 | 11 | - fixes # 12 | 13 | 18 | 19 | Checklist: 20 | 21 | - [ ] Add tests that demonstrate the correct behavior of the change. Tests should fail without the change. 22 | - [ ] Add or update relevant docs, in the docs folder and in code. 23 | - [ ] Add an entry in `CHANGES.rst` summarizing the change and linking to the issue. 24 | - [ ] Add `.. versionchanged::` entries in any relevant code docs. 25 | - [ ] Run `pre-commit` hooks and fix any issues. 26 | - [ ] Run `pytest` and `tox`, no tests failed. 
27 | -------------------------------------------------------------------------------- /.github/workflows/lock.yaml: -------------------------------------------------------------------------------- 1 | name: 'Lock threads' 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | 7 | jobs: 8 | lock: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: dessant/lock-threads@v2 12 | with: 13 | github-token: ${{ github.token }} 14 | issue-lock-inactive-days: 14 15 | pr-lock-inactive-days: 14 16 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | on: 3 | push: 4 | branches: 5 | - master 6 | - '*.x' 7 | paths-ignore: 8 | - 'docs/**' 9 | - '*.md' 10 | - '*.rst' 11 | pull_request: 12 | branches: 13 | - master 14 | - '*.x' 15 | paths-ignore: 16 | - 'docs/**' 17 | - '*.md' 18 | - '*.rst' 19 | jobs: 20 | tests: 21 | runs-on: ubuntu-latest 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | include: 26 | - {name: Linux, python: '3.9', os: ubuntu-latest, tox: py39} 27 | - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} 28 | - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310} 29 | - {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311} 30 | - {name: 'PyPy', python: pypy-3.8, os: ubuntu-latest, tox: pypy38} 31 | - {name: 'mypy', python: '3.9', os: ubuntu-latest, tox: typing} 32 | steps: 33 | - uses: actions/checkout@v2 34 | - uses: actions/setup-python@v2 35 | with: 36 | python-version: ${{ matrix.python }} 37 | - name: Install APT dependencies 38 | run: | 39 | sudo apt-get update 40 | sudo apt-get install libmemcached-dev memcached redis 41 | sudo systemctl stop memcached 42 | sudo systemctl stop redis-server 43 | - name: update pip 44 | run: | 45 | pip install -U wheel 46 | pip install -U setuptools 47 | python -m pip install -U pip 48 | - name: get pip cache dir 49 | id: pip-cache 50 | run: 
echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
-------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | 14 | # Build documentation in the docs/ directory with Sphinx 15 | sphinx: 16 | configuration: docs/conf.py 17 | 18 | # We recommend specifying your dependencies to enable reproducible builds: 19 | # https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 20 | # python: 21 | # install: 22 | # - requirements: docs/requirements.txt 23 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | Version 2.3.1 5 | ------------- 6 | 7 | Released 2025-02-22 8 | 9 | - Relax cachelib version to allow latest releases 10 | 11 | 12 | 13 | Version 2.3.0 14 | ------------- 15 | 16 | Released 2024-05-04 17 | 18 | - Added ``response_hit_indication`` flag to ``Cache.cached`` decorator for appending 'hit_cache' headers to responses, indicating cache hits. 19 | 20 | 21 | Version 2.2.0 22 | ------------- 23 | 24 | - Drop python 3.7 support 25 | - python 3.11 officially supported 26 | - Fix issue causing `args_to_ignore` to not work with `flask_caching.Cache.memoize` decorator when keyword arguments were used in the decorated function call 27 | 28 | 29 | Version 2.1.0 30 | ------------- 31 | 32 | Released 2024-10-08 33 | 34 | - fix type signature in ``flask_caching.utils.make_template_fragment_key``. 
:pr:`430` 35 | - Added docs and example for make_cache_key 36 | - support Flask 3 37 | 38 | 39 | Version 2.0.2 40 | ------------- 41 | 42 | Released 2023-01-12 43 | 44 | - fix issue with boto3 dependencie due to latest cachelib released 45 | - migrate ``flask_caching.backends.RedisCluster`` dependency from redis-py-cluster to redis-py 46 | - bug fix: make the ``make_cache_key`` attributed of decorated view functions writeable. :pr:`431`, :issue:`97` 47 | 48 | 49 | Version 2.0.1 50 | ------------- 51 | 52 | Released 2022-07-30 53 | 54 | - Relax dependency pin to allow Flask 2.x.x 55 | 56 | 57 | Version 2.0.0 58 | ------------- 59 | 60 | Released 2022-06-26 61 | 62 | - fix bug where ``flask_caching.backends.RedisSentinelCache.get_many`` would query wrong host&port combination. :pr:`372` 63 | - Remove ``flask_caching.backends.FileSystemCache`` method overrides. It now shares 100% of ``cachelib.FileSystemCache`` API and is fully compatible. Functionality relient on implementation details of said overrides from older releases might not work anymore. :pr:`369` 64 | - Add proxy to underlaying ``has`` method of cache clients. :pr:`356` 65 | - ``flask_caching.backends.FileSystemCache`` now stores timestamps in a universal (non-frammed) way following the lastest version of ``cachelib.FileSystemCache``. The change also reduces overhead from 17 bytes (via previous method using pickle) to 4 bytes (using python's ``struct``). This, however, will break compatibily since older timestamps are serialized with a different strategy. 66 | 67 | 68 | Version 1.11.1 69 | -------------- 70 | 71 | Released 2022-05-27 72 | 73 | - Add cachelib to setup.py: :pr:`354` 74 | 75 | 76 | Version 1.11.0 77 | -------------- 78 | 79 | Released 2022-05-27 80 | 81 | - Add suport for cached/memoized generators. :pr:`286` 82 | - Add support for Flask 2.0 async. :pr:`282` 83 | - Cachelib is now used as backend. :pr:`308` 84 | - Drop support for python 3.6. 
:pr:`332` 85 | - Add support for dynamic cache timeouts `#296` 86 | - Fix bug in ``CACHE_OPTIONS`` reading for redis in ``RedisSentinelCache``. :pr:`343` 87 | 88 | 89 | Version 1.10.1 90 | -------------- 91 | 92 | Released 2021-03-17 93 | 94 | - A ``GoogleCloudStorageCache`` backend has been added to the user contributed 95 | caching backends. :pr:`214` 96 | - Fix a regression introduced in the last release which broke all applications 97 | subclassing the ``Cache`` class. 98 | - Add test_generic_get_bytes test case. 99 | :pr:`236` 100 | - Various improvements and fixes. 101 | 102 | 103 | Version 1.10.0 104 | -------------- 105 | 106 | Released 2021-03-04 107 | 108 | - **Important**: The way caching backends are loaded have been refactored. 109 | Instead of passing the name of the initialization function one can now use 110 | the full path to the caching backend class. 111 | For example: 112 | ``CACHE_TYPE="flask_caching.backends.SimpleCache"``. 113 | In the next major release (2.0), this will be the only supported way. 114 | - UWSGICache is not officially supported anymore and moved to the user 115 | contributed backends. 116 | - Switch from Travis-CI to GitHub Actions 117 | - Fix add() in RedisCache without a timeout. 118 | :pr:`218` 119 | - Fix error in how the FileSystemCache counts the number of files. 120 | :pr:`210` 121 | - Type Annotations have been added. 122 | :pr:`198` 123 | - Add some basic logging to SimpleCache and FileSystemCache for better 124 | observability. 125 | :pr:`203` 126 | - Add option in memoize to ignore args 127 | :pr:`201` 128 | - Stop marking wheels as Python 2 compatible. 129 | :pr:`196` 130 | - Fix ``default_timeout`` not being properly passed to its super constructor. 131 | :pr:`187` 132 | - Fix ``kwargs`` not being passed on in function ``_memoize_make_cache_key``. 133 | :pr:`184` 134 | - Add a Redis Cluster Mode caching backend. 135 | :pr:`173` 136 | - Do not let PIP install this package on unsupported Python Versions. 
137 | :pr:`179` 138 | - Fix uWSGI initialization by checking if uWSGI has the 'cache2' option 139 | enabled. :pr:`176` 140 | - Documentation updates and fixes. 141 | 142 | 143 | Version 1.9.0 144 | ------------- 145 | 146 | Released 2020-06-02 147 | 148 | - Add an option to include the functions source code when generating the cache 149 | key. :pr:`156` 150 | - Add an feature that allows one to completely control the way how cache keys 151 | are generated. For example, one can now implement a function that generates a 152 | cache key the based on POST requests. 153 | :pr:`159` 154 | - Fix the cache backend naming collisions by renaming them from ``simple`` to 155 | ``simplecache``, ``null`` to ``nullcache`` and ``filesystem`` to 156 | ``filesystemcache``. 157 | - Explicitly pass the ``default_timeout`` to ``RedisCache`` from 158 | ``RedisSentinelCache``. 159 | - Use ``os.replace`` instead of werkzeug's ``rename`` due to Windows raising an 160 | ``OSError`` if the dst file already exist. 161 | - Documentation updates and fixes. 162 | 163 | 164 | Version 1.8.0 165 | ------------- 166 | 167 | Released 2019-11-24 168 | 169 | - **BREAKING:** Removed support for Python 2. Python 3.5 and upwards are 170 | supported as of now. 171 | - Add option to specify if ``None`` is a cached value or not. See 172 | :pr:`140` and 173 | `#141` 174 | - Allow to use ``__caching_id__`` rather than ``__repr__`` as an object 175 | caching key. 176 | :pr:`123` 177 | - The RedisCache backend now support generating the key_prefix via a callable. 178 | :pr:`109` 179 | - Emit a warning if the ``CACHE_TYPE`` is set to ``filesystem`` but no 180 | ``CACHE_DIR`` is set. 181 | - Fixes Google App Engine Memcache backend. 182 | See issue `#120` for 183 | more details. 184 | - Various documentation updates and fixes. 
185 | 186 | 187 | Version 1.7.2 188 | ------------- 189 | 190 | Released 2019-05-28 191 | 192 | **This is the last version supporting Python 2!** 193 | 194 | - Do not run a cached/memoized function if the cached return value is None. 195 | :pr:`108` 196 | 197 | 198 | Version 1.7.1 199 | ------------- 200 | 201 | Released 2019-04-16 202 | 203 | - Fix introspecting Python 3 functions by using varkw. 204 | :pr:`101` 205 | - Remove leftover files (``uwsgi.py``) in PyPI package. See issue 206 | `#102` for more details. 207 | 208 | 209 | Version 1.7.0 210 | ------------- 211 | 212 | Released 2019-03-29 213 | 214 | - Added a feature called 'response_filter' which enables one to only 215 | cache views depending on the response code. 216 | :pr:`99` 217 | - A DeprecationWarning got turned into a TypeError. 218 | 219 | 220 | Version 1.6.0 221 | ------------- 222 | 223 | Released 2019-03-06 224 | 225 | - The ``delete_many`` function is now able to ignore any errors and continue 226 | deleting the cache. However, in order to preserve backwards compatibility, 227 | the default mode is to abort the deletion process. In order to use the new 228 | deletion mode, one has to flip the config setting ``CACHE_IGNORE_ERRORS`` to 229 | ``True``. This was and still is only relevant for the **filesystem** and 230 | **simple** cache backends. 231 | :pr:`94` 232 | - Re-added the ``gaememcached`` CACHE_TYPE for improved backwards compatibility. 233 | - Documentation improvements 234 | 235 | 236 | Version 1.5.0 237 | ------------- 238 | 239 | Released 2019-02-23 240 | 241 | - Add support for a Redis Sentinel Cluster. 242 | :pr:`90` 243 | - Parameterize the hash function so alternatives can be used. 244 | :pr:`77` 245 | - Include the deprecated ``werkzeug.contrib.cache`` module in Flask-Caching. 246 | :pr:`75` 247 | 248 | 249 | Version 1.4.0 250 | ------------- 251 | 252 | Released 2018-04-16 253 | 254 | - Fix logic for creating key for var args in memoize. 
255 | :pr:`70` 256 | - Allow older Werkzeug versions by making the UWSGICache backend conditional. 257 | :pr:`55` 258 | - Some documentation improvements. 259 | :pr:`48`, 260 | `#51`, 261 | `#56`, 262 | `#67` 263 | - Some CI improvements. 264 | :pr:`49`, 265 | `#50` 266 | 267 | 268 | Version 1.3.3 269 | ------------- 270 | 271 | Released 2017-06-25 272 | 273 | - Add support for multiple query params and use md5 for consistent hashing. 274 | :pr:`43` 275 | 276 | 277 | Version 1.3.2 278 | ------------- 279 | 280 | Released 2017-06-25 281 | 282 | - Fix ``spreadsaslmemcached`` backend when using Python 3. 283 | - Fix kwargs order when memoizing a function using Python 3.6 or greater. 284 | See `#27` 285 | 286 | 287 | Version 1.3.1 288 | ------------- 289 | 290 | Released 2017-06-20 291 | 292 | - Avoid breakage for environments with Werkzeug<0.12 installed because 293 | the uwsgi backend depends on Werkzeug >=0.12. See `#38` 294 | 295 | 296 | Version 1.3.0 297 | ------------- 298 | 299 | Released 2017-06-17 300 | 301 | - Add uWSGI Caching backend (requires Werkzeug >= 0.12) 302 | - Provide a keyword `query_string` to the cached decorator in order to create 303 | the same cache key for different query string requests, 304 | so long as they have the same key/value (order does not matter). 305 | :pr:`35` 306 | - Use pytest as test suite and test runner. Additionally, the tests have 307 | been split up into multiple files instead of having one big file. 308 | 309 | 310 | Version 1.2.0 311 | ------------- 312 | 313 | Released 2017-02-02 314 | 315 | - Allows functions with kwargs to be memoized correctly. See `#18` 316 | 317 | 318 | Version 1.1.1 319 | ------------- 320 | 321 | Released 2016-12-09 322 | 323 | - Fix PyPI Package distribution. See `#15` 324 | 325 | 326 | Version 1.1.0 327 | ------------- 328 | 329 | Released 2016-12-09 330 | 331 | - Fix 'redis' backend import mechanisim. 
See `#14` 332 | - Made backends a module to better control which cache backends to expose 333 | and moved our custom clients into a own module inside of the backends 334 | module. See also `#14` (and partly some own changes). 335 | - Some docs and test changes. See `#8` 336 | and `#12` 337 | 338 | 339 | Version 1.0.1 340 | ------------- 341 | 342 | Released 2016-08-30 343 | 344 | - The caching wrappers like `add`, `set`, etc are now returning the wrapped 345 | result as someone would expect. See `#5` 346 | 347 | 348 | Version 1.0.0 349 | ------------- 350 | 351 | Released 2016-07-05 352 | 353 | - Changed the way of importing Flask-Cache. Instead of using the depreacted 354 | method for importing Flask Extensions (via ``flask.ext.cache``), 355 | the name of the extension, ``flask_cache`` is used. Have a look at 356 | `Flask's documentation` 357 | for more information regarding this matter. This also fixes the 358 | deprecation warning from Flask. 359 | - Lots of PEP8 and Documentation fixes. 360 | - Renamed this fork Flask-Caching (``flask_caching``) as it will now be 361 | available on PyPI for download. 
362 | 363 | In addition to the above mentioned fixes, following pull requests have been 364 | merged into this fork of `Flask-Cache`: 365 | 366 | - `#90 Update documentation: route decorator before cache` 367 | - `#95 Pass the memoize parameters into unless().` 368 | - `#109 wrapped function called twice` 369 | - `#117 Moves setting the app attribute to the _set_cache method` 370 | - `#121 fix doc for delete_memoized` 371 | - `#122 Added proxy for werkzeug get_dict` 372 | - `#123 "forced_update" option to 'cache' and 'memoize' decorators` 373 | - `#124 Fix handling utf8 key args` (cherry-picked) 374 | - `#125 Fix unittest failing for redis unittest` 375 | - `#127 Improve doc for using @cached on view` 376 | - `#128 Doc for delete_memoized` 377 | - `#129 tries replacing inspect.getargspec with either signature or getfullargspec if possible` 378 | - `make_cache_key() returning incorrect key` (cherry-picked) 379 | 380 | 381 | Version 0.13 382 | ------------ 383 | 384 | Released 2014-04-21 385 | 386 | - Port to Python >= 3.3 (requiring Python 2.6/2.7 for 2.x). 387 | - Fixed bug with using per-memoize timeouts greater than the default timeout 388 | - Added better support for per-instance memoization. 389 | - Various bug fixes 390 | 391 | 392 | Version 0.12 393 | ------------ 394 | 395 | Released 2013-04-29 396 | 397 | - Changes jinja2 cache templates to use stable predictable keys. Previously 398 | the key for a cache tag included the line number of the template, which made 399 | it difficult to predict what the key would be outside of the application. 400 | - Adds config variable `CACHE_NO_NULL_WARNING` to silence warning messages 401 | when using 'null' cache as part of testing. 402 | - Adds passthrough to clear entire cache backend. 403 | 404 | 405 | Version 0.11.1 406 | -------------- 407 | 408 | Released 2013-04-7 409 | 410 | - Bugfix for using memoize on instance methods. 
411 | The previous key was id(self), the new key is repr(self) 412 | 413 | 414 | Version 0.11 415 | ------------ 416 | 417 | Released 2013-03-23 418 | 419 | - Fail gracefully in production if cache backend raises an exception. 420 | - Support for redis DB number 421 | - Jinja2 templatetag cache now concats all args together into a single key 422 | instead of treating each arg as a separate key name. 423 | - Added delete memcache version hash function 424 | - Support for multiple cache objects on a single app again. 425 | - Added SpreadSASLMemcached, if a value is greater than the memcached threshold 426 | which defaults to 1MB, this splits the value across multiple keys. 427 | - Added support to use URL to connect to redis. 428 | 429 | 430 | Version 0.10.1 431 | -------------- 432 | 433 | Released 2013-01-13 434 | 435 | - Added warning message when using cache type of 'null' 436 | - Changed imports to relative instead of absolute for AppEngine compatibility 437 | 438 | 439 | Version 0.10.0 440 | -------------- 441 | 442 | Released 2013-01-05 443 | 444 | - Added `saslmemcached` backend to support Memcached behind SASL authentication. 445 | - Fixes a bug with memoize when the number of args != number of kwargs 446 | 447 | 448 | Version 0.9.2 449 | ------------- 450 | 451 | Released 2012-11-18 452 | 453 | - Bugfix with default kwargs 454 | 455 | 456 | Version 0.9.1 457 | ------------- 458 | 459 | Released 2012-11-16 460 | 461 | - Fixes broken memoized on functions that use default kwargs 462 | 463 | 464 | Version 0.9.0 465 | ------------- 466 | 467 | Released 2012-10-14 468 | 469 | - Fixes memoization to work on methods. 470 | 471 | 472 | Version 0.8.0 473 | ------------- 474 | 475 | Released 2012-09-30 476 | 477 | - Migrated to the new flask extension naming convention of flask_cache instead of flaskext.cache 478 | - Removed unnecessary dependencies in setup.py file. 
479 | - Documentation updates 480 | 481 | 482 | Version 0.7.0 483 | ------------- 484 | 485 | Released 2012-08-25 486 | 487 | - Allows multiple cache objects to be instantiated with different configuration values. 488 | 489 | 490 | Version 0.6.0 491 | ------------- 492 | 493 | Released 2012-08-12 494 | 495 | - Memoization is now safer for multiple applications using the same backing store. 496 | - Removed the explicit set of NullCache if the Flask app is set testing=True 497 | - Swapped Conditional order for key_prefix 498 | 499 | 500 | Version 0.5.0 501 | ------------- 502 | 503 | Released 2012-02-03 504 | 505 | - Deleting memoized functions now properly functions in production 506 | environments where multiple instances of the application are running. 507 | - get_memoized_names and get_memoized_keys have been removed. 508 | - Added ``make_name`` to memoize, make_name is an optional callable that can be passed 509 | to memoize to modify the cache_key that gets generated. 510 | - Added ``unless`` to memoize, this is the same as the unless parameter in ``cached`` 511 | - memoization now converts all kwargs to positional arguments, this is so that 512 | when a function is called multiple ways, it would evaluate to the same cache_key 513 | 514 | 515 | Version 0.4.0 516 | ------------- 517 | 518 | Released 2011-12-11 519 | 520 | - Added attributes for uncached, make_cache_key, cache_timeout 521 | to the decorated functions. 522 | 523 | 524 | Version 0.3.4 525 | ------------- 526 | 527 | Released 2011-09-10 528 | 529 | - UTF-8 encoding of cache key 530 | - key_prefix argument of the cached decorator now supports callables. 531 | 532 | 533 | Version 0.3.3 534 | ------------- 535 | 536 | Released 2011-06-03 537 | 538 | Uses base64 for memoize caching. This fixes rare issues where the cache_key 539 | was either a tuple or larger than the caching backend would be able to 540 | support. 
541 | 542 | Adds support for deleting memoized caches optionally based on function parameters. 543 | 544 | Python 2.5 compatibility, plus bugfix with string.format. 545 | 546 | Added the ability to retrieve memoized function names or cache keys. 547 | 548 | 549 | Version 0.3.2 550 | ------------- 551 | 552 | Bugfix release. Fixes a bug that would cause an exception if no 553 | ``CACHE_TYPE`` was supplied. 554 | 555 | Version 0.3.1 556 | ------------- 557 | 558 | Pypi egg fix. 559 | 560 | 561 | Version 0.3 562 | ----------- 563 | 564 | - CACHE_TYPE changed. Now one of ['null', 'simple', 'memcached', 565 | 'gaememcached', 'filesystem'], or an import string to a function that will 566 | instantiate a cache object. This allows Flask-Cache to be much more 567 | extensible and configurable. 568 | 569 | 570 | Version 0.2 571 | ----------- 572 | 573 | - CACHE_TYPE now uses an import_string. 574 | - Added CACHE_OPTIONS and CACHE_ARGS configuration values. 575 | - Added delete_memoized 576 | 577 | 578 | Version 0.1 579 | ----------- 580 | 581 | - Initial public release 582 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | How to contribute to Flask-Caching 2 | ================================== 3 | 4 | Thank you for considering contributing to Flask-Caching! 5 | 6 | 7 | Support questions 8 | ----------------- 9 | 10 | Please don't use the issue tracker for this. The issue tracker is a tool 11 | to address bugs and feature requests in Flask-Caching itself. Use one of the 12 | following resources for questions about using Flask-Caching or issues with 13 | your own code: 14 | 15 | - The ``#get-help`` channel on our Discord chat: 16 | https://discord.gg/pallets 17 | - Ask on `Stack Overflow`_. Search with Google first using: 18 | ``site:stackoverflow.com Flask-Caching {search term, exception message, etc.}`` 19 | 20 | .. 
_Stack Overflow: https://stackoverflow.com/search?tab=relevance&q=Flask-Caching 21 | 22 | 23 | Reporting issues 24 | ---------------- 25 | 26 | Include the following information in your post: 27 | 28 | - Describe what you expected to happen. 29 | - If possible, include a `minimal reproducible example`_ to help us 30 | identify the issue. This also helps check that the issue is not with 31 | your own code. 32 | - Describe what actually happened. Include the full traceback if there 33 | was an exception. 34 | - List your Python and Flask-Caching versions. If possible, check if this 35 | issue is already fixed in the latest releases or the latest code in 36 | the repository. 37 | 38 | .. _minimal reproducible example: https://stackoverflow.com/help/minimal-reproducible-example 39 | 40 | 41 | Submitting patches 42 | ------------------ 43 | 44 | If there is not an open issue for what you want to submit, prefer 45 | opening one for discussion before working on a PR. You can work on any 46 | issue that doesn't have an open PR linked to it or a maintainer assigned 47 | to it. These show up in the sidebar. No need to ask if you can work on 48 | an issue that interests you. 49 | 50 | Include the following in your patch: 51 | 52 | - Use `Black`_ to format your code. This and other tools will run 53 | automatically if you install `pre-commit`_ using the instructions 54 | below. 55 | - Include tests if your patch adds or changes code. Make sure the test 56 | fails without your patch. 57 | - Update any relevant docs pages and docstrings. Docs pages and 58 | docstrings should be wrapped at 72 characters. 59 | - Add an entry in ``CHANGES.rst``. Use the same style as other 60 | entries. Also include ``.. versionchanged::`` inline changelogs in 61 | relevant docstrings. 62 | 63 | .. _Black: https://black.readthedocs.io 64 | .. _pre-commit: https://pre-commit.com 65 | 66 | 67 | First time setup 68 | ~~~~~~~~~~~~~~~~ 69 | 70 | - Download and install the `latest version of git`_. 
71 | - Configure git with your `username`_ and `email`_. 72 | 73 | .. code-block:: text 74 | 75 | $ git config --global user.name 'your name' 76 | $ git config --global user.email 'your email' 77 | 78 | - Make sure you have a `GitHub account`_. 79 | - Fork Flask-Caching to your GitHub account by clicking the `Fork`_ button. 80 | - `Clone`_ the main repository locally. 81 | 82 | .. code-block:: text 83 | 84 | $ git clone https://github.com/pallets-eco/flask-caching 85 | $ cd flask-caching 86 | 87 | - Add your fork as a remote to push your work to. Replace 88 | ``{username}`` with your username. This names the remote "fork", the 89 | default Pallets remote is "origin". 90 | 91 | .. code-block:: text 92 | 93 | git remote add fork https://github.com/{username}/flask-caching 94 | 95 | - Create a virtualenv. 96 | 97 | .. tabs:: 98 | 99 | .. group-tab:: Linux/macOS 100 | 101 | .. code-block:: text 102 | 103 | $ python3 -m venv env 104 | $ . env/bin/activate 105 | 106 | .. group-tab:: Windows 107 | 108 | .. code-block:: text 109 | 110 | > py -3 -m venv env 111 | > env\Scripts\activate 112 | 113 | - Upgrade pip and setuptools. 114 | 115 | .. code-block:: text 116 | 117 | $ python -m pip install --upgrade pip setuptools 118 | 119 | - Install the development dependencies, then install Flask-Caching in 120 | editable mode. 121 | 122 | .. code-block:: text 123 | 124 | $ pip install -r requirements/dev.txt && pip install -e . 125 | 126 | - Install the pre-commit hooks. 127 | 128 | .. code-block:: text 129 | 130 | $ pre-commit install 131 | 132 | .. _latest version of git: https://git-scm.com/downloads 133 | .. _username: https://docs.github.com/en/github/using-git/setting-your-username-in-git 134 | .. _email: https://docs.github.com/en/github/setting-up-and-managing-your-github-user-account/setting-your-commit-email-address 135 | .. _GitHub account: https://github.com/join 136 | .. _Fork: https://github.com/pallets-eco/flask-caching/fork 137 | .. 
_Clone: https://docs.github.com/en/github/getting-started-with-github/fork-a-repo#step-2-create-a-local-clone-of-your-fork 138 | 139 | 140 | Start coding 141 | ~~~~~~~~~~~~ 142 | 143 | - Create a branch to identify the issue you would like to work on. If 144 | you're submitting a bug or documentation fix, branch off of the 145 | latest ".x" branch. 146 | 147 | .. code-block:: text 148 | 149 | $ git fetch origin 150 | $ git checkout -b your-branch-name origin/main 151 | 152 | If you're submitting a feature addition or change, branch off of the 153 | "main" branch. 154 | 155 | .. code-block:: text 156 | 157 | $ git fetch origin 158 | $ git checkout -b your-branch-name origin/main 159 | 160 | - Using your favorite editor, make your changes, 161 | `committing as you go`_. 162 | - Include tests that cover any code changes you make. Make sure the 163 | test fails without your patch. Run the tests as described below. 164 | - Push your commits to your fork on GitHub and 165 | `create a pull request`_. Link to the issue being addressed with 166 | ``fixes #123`` in the pull request. 167 | 168 | .. code-block:: text 169 | 170 | $ git push --set-upstream fork your-branch-name 171 | 172 | .. _committing as you go: https://dont-be-afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html#commit-your-changes 173 | .. _create a pull request: https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request 174 | 175 | 176 | Running the tests 177 | ~~~~~~~~~~~~~~~~~ 178 | 179 | Run the basic test suite with pytest. 180 | 181 | .. code-block:: text 182 | 183 | $ pytest 184 | 185 | This runs the tests for the current environment, which is usually 186 | sufficient. CI will run the full suite when you submit your pull 187 | request. You can run the full test suite with tox if you don't want to 188 | wait. 189 | 190 | .. 
code-block:: text 191 | 192 | $ tox 193 | 194 | 195 | Running test coverage 196 | ~~~~~~~~~~~~~~~~~~~~~ 197 | 198 | Generating a report of lines that do not have test coverage can indicate 199 | where to start contributing. Run ``pytest`` using ``coverage`` and 200 | generate a report. 201 | 202 | .. code-block:: text 203 | 204 | $ pip install coverage 205 | $ coverage run -m pytest 206 | $ coverage html 207 | 208 | Open ``htmlcov/index.html`` in your browser to explore the report. 209 | 210 | Read more about `coverage <https://coverage.readthedocs.io>`__. 211 | 212 | 213 | Building the docs 214 | ~~~~~~~~~~~~~~~~~ 215 | 216 | Build the docs in the ``docs`` directory using Sphinx. 217 | 218 | .. code-block:: text 219 | 220 | $ cd docs 221 | $ make html 222 | 223 | Open ``_build/html/index.html`` in your browser to view the docs. 224 | 225 | Read more about `Sphinx <https://www.sphinx-doc.org/en/master/>`__. 226 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010 by Thadeus Burgess. 2 | Copyright (c) 2016 by Peter Justin. 3 | 4 | Some rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are 8 | met: 9 | 10 | * Redistributions of source code must retain the above copyright 11 | notice, this list of conditions and the following disclaimer. 12 | 13 | * Redistributions in binary form must reproduce the above 14 | copyright notice, this list of conditions and the following 15 | disclaimer in the documentation and/or other materials provided 16 | with the distribution. 17 | 18 | * The names of the contributors may not be used to endorse or 19 | promote products derived from this software without specific 20 | prior written permission.
21 | 22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 23 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 24 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 25 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 26 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 27 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 28 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 29 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 30 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 31 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 32 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 33 | 34 | 35 | The "cache" module from werkzeug is licensed under a BSD-3 Clause license as is 36 | stated below: 37 | 38 | Copyright (c) 2017, Pallets Team 39 | 40 | All rights reserved. 41 | 42 | 43 | Redistribution and use in source and binary forms, with or without 44 | modification, are permitted provided that the following conditions are 45 | met: 46 | 47 | * Redistributions of source code must retain the above copyright notice, 48 | this list of conditions and the following disclaimer. 49 | 50 | * Redistributions in binary form must reproduce the above copyright 51 | notice, this list of conditions and the following disclaimer in the 52 | documentation and/or other materials provided with the distribution. 53 | 54 | * Neither the name of the copyright holder nor the names of its 55 | contributors may be used to endorse or promote products derived from 56 | this software without specific prior written permission. 
57 | 58 | THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND 59 | CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, 60 | BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 61 | FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 62 | COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 63 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 64 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF 65 | USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 66 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 67 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 68 | THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF 69 | SUCH DAMAGE. 70 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include CONTRIBUTORS 3 | include CHANGES 4 | include README.md 5 | include tox.ini 6 | include setup.cfg 7 | include src/flask_caching/py.typed 8 | 9 | graft docs 10 | graft tests 11 | 12 | prune docs/_build 13 | 14 | global-exclude __pycache__ 15 | global-exclude *.py[co] 16 | global-exclude *.sw[a-z] 17 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Flask-Caching 2 | ============= 3 | 4 | A fork of the `Flask-cache`_ extension which adds easy cache support to Flask. 5 | 6 | .. _Flask-cache: https://github.com/thadeusb/flask-cache 7 | 8 | 9 | Installing 10 | ---------- 11 | 12 | Install and update using `pip`_: 13 | 14 | .. code-block:: text 15 | 16 | $ pip install -U flask-caching 17 | 18 | .. 
_pip: https://pip.pypa.io/en/stable/getting-started/ 19 | 20 | 21 | Donate 22 | ------ 23 | 24 | The Pallets organization develops and supports Flask and the libraries 25 | it uses. In order to grow the community of contributors and users, and 26 | allow the maintainers to devote more time to the projects, `please 27 | donate today`_. 28 | 29 | .. _please donate today: https://palletsprojects.com/donate 30 | 31 | 32 | Links 33 | ----- 34 | 35 | - Documentation: https://flask-caching.readthedocs.io 36 | - Changes: https://flask-caching.readthedocs.io/en/latest/changelog.html 37 | - PyPI Releases: https://pypi.org/project/Flask-Caching/ 38 | - Source Code: https://github.com/pallets-eco/flask-caching 39 | - Issue Tracker: https://github.com/pallets-eco/flask-caching/issues 40 | - Twitter: https://twitter.com/PalletsTeam 41 | - Chat: https://discord.gg/pallets 42 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
16 | 17 | .PHONY: help 18 | help: 19 | @echo "Please use \`make <target>' where <target> is one of" 20 | @echo " html to make standalone HTML files" 21 | @echo " dirhtml to make HTML files named index.html in directories" 22 | @echo " singlehtml to make a single large HTML file" 23 | @echo " pickle to make pickle files" 24 | @echo " json to make JSON files" 25 | @echo " htmlhelp to make HTML files and a HTML help project" 26 | @echo " qthelp to make HTML files and a qthelp project" 27 | @echo " applehelp to make an Apple Help Book" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " epub3 to make an epub3" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 34 | @echo " text to make text files" 35 | @echo " man to make manual pages" 36 | @echo " texinfo to make Texinfo files" 37 | @echo " info to make Texinfo files and run them through makeinfo" 38 | @echo " gettext to make PO message catalogs" 39 | @echo " changes to make an overview of all changed/added/deprecated items" 40 | @echo " xml to make Docutils-native XML files" 41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 42 | @echo " linkcheck to check all external links for integrity" 43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 44 | @echo " coverage to run coverage check of the documentation (if enabled)" 45 | @echo " dummy to check syntax errors of document sources" 46 | 47 | .PHONY: clean 48 | clean: 49 | rm -rf $(BUILDDIR)/* 50 | 51 | .PHONY: html 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 | 57 | .PHONY: dirhtml 58 | dirhtml: 59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 60 | @echo 61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 62 | 63 | .PHONY: singlehtml 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | .PHONY: pickle 70 | pickle: 71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 72 | @echo 73 | @echo "Build finished; now you can process the pickle files." 74 | 75 | .PHONY: json 76 | json: 77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 78 | @echo 79 | @echo "Build finished; now you can process the JSON files." 80 | 81 | .PHONY: htmlhelp 82 | htmlhelp: 83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 84 | @echo 85 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 86 | ".hhp project file in $(BUILDDIR)/htmlhelp." 87 | 88 | .PHONY: qthelp 89 | qthelp: 90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 91 | @echo 92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Flask-Caching.qhcp" 95 | @echo "To view the help file:" 96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Flask-Caching.qhc" 97 | 98 | .PHONY: applehelp 99 | applehelp: 100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 101 | @echo 102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 103 | @echo "N.B. You won't be able to view it unless you put it in" \ 104 | "~/Library/Documentation/Help or install it in your application" \ 105 | "bundle." 106 | 107 | .PHONY: devhelp 108 | devhelp: 109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 110 | @echo 111 | @echo "Build finished." 
112 | @echo "To view the help file:" 113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Flask-Caching" 114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Flask-Caching" 115 | @echo "# devhelp" 116 | 117 | .PHONY: epub 118 | epub: 119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 120 | @echo 121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 122 | 123 | .PHONY: epub3 124 | epub3: 125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 126 | @echo 127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 128 | 129 | .PHONY: latex 130 | latex: 131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 132 | @echo 133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 135 | "(use \`make latexpdf' here to do that automatically)." 136 | 137 | .PHONY: latexpdf 138 | latexpdf: 139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 140 | @echo "Running LaTeX files through pdflatex..." 141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 143 | 144 | .PHONY: latexpdfja 145 | latexpdfja: 146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 147 | @echo "Running LaTeX files through platex and dvipdfmx..." 148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 150 | 151 | .PHONY: text 152 | text: 153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 154 | @echo 155 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 156 | 157 | .PHONY: man 158 | man: 159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 160 | @echo 161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 162 | 163 | .PHONY: texinfo 164 | texinfo: 165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 166 | @echo 167 | @echo "Build finished. 
The Texinfo files are in $(BUILDDIR)/texinfo." 168 | @echo "Run \`make' in that directory to run these through makeinfo" \ 169 | "(use \`make info' here to do that automatically)." 170 | 171 | .PHONY: info 172 | info: 173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 174 | @echo "Running Texinfo files through makeinfo..." 175 | make -C $(BUILDDIR)/texinfo info 176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 177 | 178 | .PHONY: gettext 179 | gettext: 180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 181 | @echo 182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 183 | 184 | .PHONY: changes 185 | changes: 186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 187 | @echo 188 | @echo "The overview file is in $(BUILDDIR)/changes." 189 | 190 | .PHONY: linkcheck 191 | linkcheck: 192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 193 | @echo 194 | @echo "Link check complete; look for any errors in the above output " \ 195 | "or in $(BUILDDIR)/linkcheck/output.txt." 196 | 197 | .PHONY: doctest 198 | doctest: 199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 200 | @echo "Testing of doctests in the sources finished, look at the " \ 201 | "results in $(BUILDDIR)/doctest/output.txt." 202 | 203 | .PHONY: coverage 204 | coverage: 205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 206 | @echo "Testing of coverage in the sources finished, look at the " \ 207 | "results in $(BUILDDIR)/coverage/python.txt." 208 | 209 | .PHONY: xml 210 | xml: 211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 212 | @echo 213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 214 | 215 | .PHONY: pseudoxml 216 | pseudoxml: 217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 218 | @echo 219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
220 | 221 | .PHONY: dummy 222 | dummy: 223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 224 | @echo 225 | @echo "Build finished. Dummy builder generates no files." 226 | -------------------------------------------------------------------------------- /docs/_static/flask-cache.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pallets-eco/flask-caching/e59bc040cd47cd2b43e501d636d43d442c50b3ff/docs/_static/flask-cache.png -------------------------------------------------------------------------------- /docs/_templates/sidebarintro.html: -------------------------------------------------------------------------------- 1 |

Useful Links

2 | 6 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | API 2 | === 3 | 4 | This section contains the API documentation of the Flask-Caching extension and 5 | lists the backends which are supported out of the box via cachelib. 6 | The `Configuration `_ section explains 7 | how the backends can be used. 8 | 9 | 10 | .. module:: flask_caching 11 | 12 | 13 | Cache API 14 | --------- 15 | 16 | .. autoclass:: Cache 17 | :members: init_app, get, set, add, delete, get_many, set_many, delete_many, 18 | has, clear, cached, memoize, delete_memoized, delete_memoized_verhash 19 | 20 | 21 | Backends 22 | -------- 23 | 24 | .. versionchanged:: 1.11.0 25 | flask-caching now uses cachelib as backend. See `cachelib API`_ for further details. 26 | 27 | 28 | .. _cachelib API: https://cachelib.readthedocs.io/en/stable/ 29 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CHANGES.rst 2 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # 2 | # Flask-Caching documentation build configuration file, created by 3 | # sphinx-quickstart on Mon Jul 4 22:58:53 2016. 4 | # 5 | # This file is execfile()d with the current directory set to its 6 | # containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | # If extensions (or modules to document with autodoc) are in another directory, 14 | # add these directories to sys.path here. 
If the directory is relative to the 15 | # documentation root, use os.path.abspath to make it absolute, like shown here. 16 | # 17 | # import os 18 | # import sys 19 | import alabaster 20 | 21 | # sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | # 27 | # needs_sphinx = '1.0' 28 | 29 | # Add any Sphinx extension module names here, as strings. They can be 30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 31 | # ones. 32 | extensions = [ 33 | "sphinx.ext.autodoc", 34 | "sphinx.ext.intersphinx", 35 | ] 36 | 37 | # Add any paths that contain templates here, relative to this directory. 38 | templates_path = ["_templates"] 39 | 40 | # The suffix(es) of source filenames. 41 | # You can specify multiple suffix as a list of string: 42 | # 43 | # source_suffix = ['.rst', '.md'] 44 | source_suffix = ".rst" 45 | 46 | # The encoding of source files. 47 | # 48 | # source_encoding = 'utf-8-sig' 49 | 50 | # The master toctree document. 51 | master_doc = "index" 52 | 53 | autodoc_member_order = "bysource" 54 | 55 | # General information about the project. 56 | project = "Flask-Caching" 57 | copyright = "2016, Thadeus Burgess, Peter Justin" 58 | author = "Thadeus Burgess, Peter Justin" 59 | 60 | # The version info for the project you're documenting, acts as replacement for 61 | # |version| and |release|, also used in various other places throughout the 62 | # built documents. 63 | # 64 | # The short X.Y version. 65 | version = "1.0.0" 66 | # The full version, including alpha/beta/rc tags. 67 | release = "1.0.0" 68 | 69 | # The language for content autogenerated by Sphinx. Refer to documentation 70 | # for a list of supported languages. 71 | # 72 | # This is also used if you do content translation via gettext catalogs. 73 | # Usually you set "language" from the command line for these cases. 
74 | language = "en" 75 | 76 | # There are two options for replacing |today|: either, you set today to some 77 | # non-false value, then it is used: 78 | # 79 | # today = '' 80 | # 81 | # Else, today_fmt is used as the format for a strftime call. 82 | # 83 | # today_fmt = '%B %d, %Y' 84 | 85 | # List of patterns, relative to source directory, that match files and 86 | # directories to ignore when looking for source files. 87 | # This patterns also effect to html_static_path and html_extra_path 88 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 89 | 90 | # The reST default role (used for this markup: `text`) to use for all 91 | # documents. 92 | # 93 | # default_role = None 94 | 95 | # If true, '()' will be appended to :func: etc. cross-reference text. 96 | # 97 | # add_function_parentheses = True 98 | 99 | # If true, the current module name will be prepended to all description 100 | # unit titles (such as .. function::). 101 | # 102 | # add_module_names = True 103 | 104 | # If true, sectionauthor and moduleauthor directives will be shown in the 105 | # output. They are ignored by default. 106 | # 107 | # show_authors = False 108 | 109 | # The name of the Pygments (syntax highlighting) style to use. 110 | pygments_style = "sphinx" 111 | 112 | # A list of ignored prefixes for module index sorting. 113 | # modindex_common_prefix = [] 114 | 115 | # If true, keep warnings as "system message" paragraphs in the built documents. 116 | # keep_warnings = False 117 | 118 | # If true, `todo` and `todoList` produce output, else they produce nothing. 119 | todo_include_todos = False 120 | 121 | 122 | # -- Options for HTML output ---------------------------------------------- 123 | 124 | # The theme to use for HTML and HTML Help pages. See the documentation for 125 | # a list of builtin themes. 126 | # 127 | html_theme = "alabaster" 128 | 129 | # Theme options are theme-specific and customize the look and feel of a theme 130 | # further. 
For a list of options available for each theme, see the 131 | # documentation. 132 | # 133 | html_theme_options = { 134 | "description": "Adds caching support to your Flask application.", 135 | "logo": "flask-cache.png", 136 | "github_button": False, 137 | "github_banner": True, 138 | "github_user": "sh4nks", 139 | "github_repo": "flask-caching", 140 | "extra_nav_links": { 141 | "Flask-Caching @ PyPI": "https://pypi.python.org/pypi/Flask-Caching", 142 | "Flask-Caching @ GitHub": "https://github.com/sh4nks/Flask-Caching", 143 | }, 144 | } 145 | # Add any paths that contain custom themes here, relative to this directory. 146 | html_theme_path = [alabaster.get_path()] 147 | 148 | # The name for this set of Sphinx documents. 149 | # " v documentation" by default. 150 | # 151 | # html_title = u'Flask-Caching v1.0.0' 152 | 153 | # A shorter title for the navigation bar. Default is the same as html_title. 154 | # 155 | # html_short_title = None 156 | 157 | # The name of an image file (relative to this directory) to place at the top 158 | # of the sidebar. 159 | # 160 | # html_logo = None 161 | 162 | # The name of an image file (relative to this directory) to use as a favicon of 163 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 164 | # pixels large. 165 | # 166 | # html_favicon = None 167 | 168 | # Add any paths that contain custom static files (such as style sheets) here, 169 | # relative to this directory. They are copied after the builtin static files, 170 | # so a file named "default.css" will overwrite the builtin "default.css". 171 | html_static_path = ["_static"] 172 | 173 | # Add any extra paths that contain custom files (such as robots.txt or 174 | # .htaccess) here, relative to this directory. These files are copied 175 | # directly to the root of the documentation. 
176 | # 177 | # html_extra_path = [] 178 | 179 | # If not None, a 'Last updated on:' timestamp is inserted at every page 180 | # bottom, using the given strftime format. 181 | # The empty string is equivalent to '%b %d, %Y'. 182 | # 183 | # html_last_updated_fmt = None 184 | 185 | # If true, SmartyPants will be used to convert quotes and dashes to 186 | # typographically correct entities. 187 | # 188 | # html_use_smartypants = True 189 | 190 | # Custom sidebar templates, maps document names to template names. 191 | # 192 | # Custom sidebar templates, maps document names to template names. 193 | html_sidebars = { 194 | "**": [ 195 | "about.html", 196 | "localtoc.html", 197 | "sidebarintro.html", 198 | "relations.html", 199 | "searchbox.html", 200 | ] 201 | } 202 | 203 | # Additional templates that should be rendered to pages, maps page names to 204 | # template names. 205 | # 206 | # html_additional_pages = {} 207 | 208 | # If false, no module index is generated. 209 | # 210 | # html_domain_indices = True 211 | 212 | # If false, no index is generated. 213 | # 214 | # html_use_index = True 215 | 216 | # If true, the index is split into individual pages for each letter. 217 | # 218 | # html_split_index = False 219 | 220 | # If true, links to the reST sources are added to the pages. 221 | # 222 | # html_show_sourcelink = True 223 | 224 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 225 | # 226 | # html_show_sphinx = True 227 | 228 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 229 | # 230 | # html_show_copyright = True 231 | 232 | # If true, an OpenSearch description file will be output, and all pages will 233 | # contain a tag referring to it. The value of this option must be the 234 | # base URL from which the finished HTML is served. 235 | # 236 | # html_use_opensearch = '' 237 | 238 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 
239 | # html_file_suffix = None 240 | 241 | # Language to be used for generating the HTML full-text search index. 242 | # Sphinx supports the following languages: 243 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 244 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' 245 | # 246 | # html_search_language = 'en' 247 | 248 | # A dictionary with options for the search language support, empty by default. 249 | # 'ja' uses this config value. 250 | # 'zh' user can custom change `jieba` dictionary path. 251 | # 252 | # html_search_options = {'type': 'default'} 253 | 254 | # The name of a javascript file (relative to the configuration directory) that 255 | # implements a search results scorer. If empty, the default will be used. 256 | # 257 | # html_search_scorer = 'scorer.js' 258 | 259 | # Output file base name for HTML help builder. 260 | htmlhelp_basename = "Flask-Cachingdoc" 261 | 262 | # -- Options for LaTeX output --------------------------------------------- 263 | 264 | latex_elements = { 265 | # The paper size ('letterpaper' or 'a4paper'). 266 | # 267 | # 'papersize': 'letterpaper', 268 | # The font size ('10pt', '11pt' or '12pt'). 269 | # 270 | # 'pointsize': '10pt', 271 | # Additional stuff for the LaTeX preamble. 272 | # 273 | # 'preamble': '', 274 | # Latex figure (float) alignment 275 | # 276 | # 'figure_align': 'htbp', 277 | } 278 | 279 | # Grouping the document tree into LaTeX files. List of tuples 280 | # (source start file, target name, title, 281 | # author, documentclass [howto, manual, or own class]). 282 | latex_documents = [ 283 | ( 284 | master_doc, 285 | "Flask-Caching.tex", 286 | "Flask-Caching Documentation", 287 | "Thadeus Burgess, Peter Justin", 288 | "manual", 289 | ), 290 | ] 291 | 292 | # The name of an image file (relative to this directory) to place at the top of 293 | # the title page. 294 | # 295 | # latex_logo = None 296 | 297 | # For "manual" documents, if this is true, then toplevel headings are parts, 298 | # not chapters. 
299 | # 300 | # latex_use_parts = False 301 | 302 | # If true, show page references after internal links. 303 | # 304 | # latex_show_pagerefs = False 305 | 306 | # If true, show URL addresses after external links. 307 | # 308 | # latex_show_urls = False 309 | 310 | # Documents to append as an appendix to all manuals. 311 | # 312 | # latex_appendices = [] 313 | 314 | # If false, no module index is generated. 315 | # 316 | # latex_domain_indices = True 317 | 318 | 319 | # -- Options for manual page output --------------------------------------- 320 | 321 | # One entry per manual page. List of tuples 322 | # (source start file, name, description, authors, manual section). 323 | man_pages = [(master_doc, "flask-caching", "Flask-Caching Documentation", [author], 1)] 324 | 325 | # If true, show URL addresses after external links. 326 | # 327 | # man_show_urls = False 328 | 329 | 330 | # -- Options for Texinfo output ------------------------------------------- 331 | 332 | # Grouping the document tree into Texinfo files. List of tuples 333 | # (source start file, target name, title, author, 334 | # dir menu entry, description, category) 335 | texinfo_documents = [ 336 | ( 337 | master_doc, 338 | "Flask-Caching", 339 | "Flask-Caching Documentation", 340 | author, 341 | "Flask-Caching", 342 | "One line description of project.", 343 | "Miscellaneous", 344 | ), 345 | ] 346 | 347 | # Documents to append as an appendix to all manuals. 348 | # 349 | # texinfo_appendices = [] 350 | 351 | # If false, no module index is generated. 352 | # 353 | # texinfo_domain_indices = True 354 | 355 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 356 | # 357 | # texinfo_show_urls = 'footnote' 358 | 359 | # If true, do not generate a @detailmenu in the "Top" node's menu. 360 | # 361 | # texinfo_no_detailmenu = False 362 | 363 | 364 | # Example configuration for intersphinx: refer to the Python standard library. 
365 | intersphinx_mapping = {"https://docs.python.org/3/": None} 366 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | 4 | .. literalinclude:: ../LICENSE 5 | :language: text 6 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. epub3 to make an epub3 31 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 32 | echo. text to make text files 33 | echo. man to make manual pages 34 | echo. texinfo to make Texinfo files 35 | echo. gettext to make PO message catalogs 36 | echo. changes to make an overview over all changed/added/deprecated items 37 | echo. xml to make Docutils-native XML files 38 | echo. 
pseudoxml to make pseudoxml-XML files for display purposes 39 | echo. linkcheck to check all external links for integrity 40 | echo. doctest to run all doctests embedded in the documentation if enabled 41 | echo. coverage to run coverage check of the documentation if enabled 42 | echo. dummy to check syntax errors of document sources 43 | goto end 44 | ) 45 | 46 | if "%1" == "clean" ( 47 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 48 | del /q /s %BUILDDIR%\* 49 | goto end 50 | ) 51 | 52 | 53 | REM Check if sphinx-build is available and fallback to Python version if any 54 | %SPHINXBUILD% 1>NUL 2>NUL 55 | if errorlevel 9009 goto sphinx_python 56 | goto sphinx_ok 57 | 58 | :sphinx_python 59 | 60 | set SPHINXBUILD=python -m sphinx.__init__ 61 | %SPHINXBUILD% 2> nul 62 | if errorlevel 9009 ( 63 | echo. 64 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 65 | echo.installed, then set the SPHINXBUILD environment variable to point 66 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 67 | echo.may add the Sphinx directory to PATH. 68 | echo. 69 | echo.If you don't have Sphinx installed, grab it from 70 | echo.http://sphinx-doc.org/ 71 | exit /b 1 72 | ) 73 | 74 | :sphinx_ok 75 | 76 | 77 | if "%1" == "html" ( 78 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 79 | if errorlevel 1 exit /b 1 80 | echo. 81 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 82 | goto end 83 | ) 84 | 85 | if "%1" == "dirhtml" ( 86 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 87 | if errorlevel 1 exit /b 1 88 | echo. 89 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 90 | goto end 91 | ) 92 | 93 | if "%1" == "singlehtml" ( 94 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 95 | if errorlevel 1 exit /b 1 96 | echo. 97 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 
98 | goto end 99 | ) 100 | 101 | if "%1" == "pickle" ( 102 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 103 | if errorlevel 1 exit /b 1 104 | echo. 105 | echo.Build finished; now you can process the pickle files. 106 | goto end 107 | ) 108 | 109 | if "%1" == "json" ( 110 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 111 | if errorlevel 1 exit /b 1 112 | echo. 113 | echo.Build finished; now you can process the JSON files. 114 | goto end 115 | ) 116 | 117 | if "%1" == "htmlhelp" ( 118 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 119 | if errorlevel 1 exit /b 1 120 | echo. 121 | echo.Build finished; now you can run HTML Help Workshop with the ^ 122 | .hhp project file in %BUILDDIR%/htmlhelp. 123 | goto end 124 | ) 125 | 126 | if "%1" == "qthelp" ( 127 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 128 | if errorlevel 1 exit /b 1 129 | echo. 130 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 131 | .qhcp project file in %BUILDDIR%/qthelp, like this: 132 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Flask-Caching.qhcp 133 | echo.To view the help file: 134 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Flask-Caching.ghc 135 | goto end 136 | ) 137 | 138 | if "%1" == "devhelp" ( 139 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 140 | if errorlevel 1 exit /b 1 141 | echo. 142 | echo.Build finished. 143 | goto end 144 | ) 145 | 146 | if "%1" == "epub" ( 147 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 148 | if errorlevel 1 exit /b 1 149 | echo. 150 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 151 | goto end 152 | ) 153 | 154 | if "%1" == "epub3" ( 155 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 156 | if errorlevel 1 exit /b 1 157 | echo. 158 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. 
159 | goto end 160 | ) 161 | 162 | if "%1" == "latex" ( 163 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 164 | if errorlevel 1 exit /b 1 165 | echo. 166 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdf" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 177 | goto end 178 | ) 179 | 180 | if "%1" == "latexpdfja" ( 181 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 182 | cd %BUILDDIR%/latex 183 | make all-pdf-ja 184 | cd %~dp0 185 | echo. 186 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 187 | goto end 188 | ) 189 | 190 | if "%1" == "text" ( 191 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 192 | if errorlevel 1 exit /b 1 193 | echo. 194 | echo.Build finished. The text files are in %BUILDDIR%/text. 195 | goto end 196 | ) 197 | 198 | if "%1" == "man" ( 199 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 200 | if errorlevel 1 exit /b 1 201 | echo. 202 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 203 | goto end 204 | ) 205 | 206 | if "%1" == "texinfo" ( 207 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 208 | if errorlevel 1 exit /b 1 209 | echo. 210 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 211 | goto end 212 | ) 213 | 214 | if "%1" == "gettext" ( 215 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 216 | if errorlevel 1 exit /b 1 217 | echo. 218 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 219 | goto end 220 | ) 221 | 222 | if "%1" == "changes" ( 223 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 224 | if errorlevel 1 exit /b 1 225 | echo. 226 | echo.The overview file is in %BUILDDIR%/changes. 
227 | goto end 228 | ) 229 | 230 | if "%1" == "linkcheck" ( 231 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 232 | if errorlevel 1 exit /b 1 233 | echo. 234 | echo.Link check complete; look for any errors in the above output ^ 235 | or in %BUILDDIR%/linkcheck/output.txt. 236 | goto end 237 | ) 238 | 239 | if "%1" == "doctest" ( 240 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 241 | if errorlevel 1 exit /b 1 242 | echo. 243 | echo.Testing of doctests in the sources finished, look at the ^ 244 | results in %BUILDDIR%/doctest/output.txt. 245 | goto end 246 | ) 247 | 248 | if "%1" == "coverage" ( 249 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage 250 | if errorlevel 1 exit /b 1 251 | echo. 252 | echo.Testing of coverage in the sources finished, look at the ^ 253 | results in %BUILDDIR%/coverage/python.txt. 254 | goto end 255 | ) 256 | 257 | if "%1" == "xml" ( 258 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 259 | if errorlevel 1 exit /b 1 260 | echo. 261 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 262 | goto end 263 | ) 264 | 265 | if "%1" == "pseudoxml" ( 266 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 267 | if errorlevel 1 exit /b 1 268 | echo. 269 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 270 | goto end 271 | ) 272 | 273 | if "%1" == "dummy" ( 274 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy 275 | if errorlevel 1 exit /b 1 276 | echo. 277 | echo.Build finished. Dummy builder generates no files. 
"""Small demo application for Flask-Caching.

Shows a cached view, a cached function, memoized functions, cache
invalidation, and the Jinja ``{% cache %}`` template tag.
"""

import random
from datetime import datetime

from flask import Flask
from flask import jsonify
from flask import render_template_string

from flask_caching import Cache

app = Flask(__name__)
app.config.from_pyfile("hello.cfg")
cache = Cache(app)


#: This is an example of a cached view
@app.route("/api/now")
@cache.cached(50)
def current_time():
    return str(datetime.now())


#: This is an example of a cached function
@cache.cached(key_prefix="binary")
def random_binary():
    return [random.randrange(0, 2) for i in range(500)]


@app.route("/api/get/binary")
def get_binary():
    return jsonify({"data": random_binary()})


#: This is an example of a memoized function
@cache.memoize(60)
def _add(a, b):
    return a + b + random.randrange(0, 1000)


@cache.memoize(60)
def _sub(a, b):
    return a - b - random.randrange(0, 1000)


@app.route("/api/add/<int:a>/<int:b>")
def add(a, b):
    return str(_add(a, b))


@app.route("/api/sub/<int:a>/<int:b>")
def sub(a, b):
    return str(_sub(a, b))


@app.route("/api/cache/delete")
def delete_cache():
    # ``delete_memoized`` expects a memoized function (plus optional
    # argument values) per call; ``delete_memoized("_add", "_sub")`` would
    # treat "_sub" as an *argument* of "_add" instead of a second function.
    cache.delete_memoized(_add)
    cache.delete_memoized(_sub)
    return "OK"


@app.route("/html")
@app.route("/html/<foo>")
def html(foo=None):
    if foo is not None:
        cache.set("foo", foo)
    return render_template_string(
        "foo cache: {{foo}}", foo=cache.get("foo")
    )


@app.route("/template")
def template():
    return render_template_string(
        """foo cache:
        {% cache 60, "random" %}
        {{ range(1, 42) | random }}
        {% endcache %}
        """
    )


if __name__ == "__main__":
    app.run()
jinja2 64 | # werkzeug 65 | nodeenv==1.8.0 66 | # via pre-commit 67 | packaging==24.1 68 | # via 69 | # build 70 | # pallets-sphinx-themes 71 | # pyproject-api 72 | # pytest 73 | # sphinx 74 | # tox 75 | pallets-sphinx-themes==2.1.3 76 | # via -r docs.in 77 | pip-tools==7.4.1 78 | # via -r dev.in 79 | platformdirs==4.2.2 80 | # via 81 | # tox 82 | # virtualenv 83 | pluggy==1.5.0 84 | # via 85 | # pytest 86 | # tox 87 | pre-commit==3.8.0 88 | # via -r dev.in 89 | psutil==5.9.8 90 | # via pytest-xprocess 91 | pygments==2.18.0 92 | # via 93 | # sphinx 94 | # sphinx-tabs 95 | pylibmc==1.6.3 96 | # via -r tests.in 97 | pyproject-api==1.7.1 98 | # via tox 99 | pyproject-hooks==1.1.0 100 | # via 101 | # build 102 | # pip-tools 103 | pytest==8.3.2 104 | # via 105 | # -r tests.in 106 | # pytest-asyncio 107 | # pytest-xprocess 108 | pytest-asyncio==0.23.8 109 | # via -r tests.in 110 | pytest-xprocess==1.0.2 111 | # via -r tests.in 112 | pyyaml==6.0.1 113 | # via pre-commit 114 | redis==5.0.8 115 | # via -r tests.in 116 | requests==2.32.0 117 | # via sphinx 118 | snowballstemmer==2.2.0 119 | # via sphinx 120 | sphinx==8.0.2 121 | # via 122 | # -r docs.in 123 | # pallets-sphinx-themes 124 | # sphinx-issues 125 | # sphinx-tabs 126 | # sphinxcontrib-log-cabinet 127 | sphinx-issues==4.1.0 128 | # via -r docs.in 129 | sphinx-tabs==3.4.5 130 | # via -r docs.in 131 | sphinxcontrib-applehelp==1.0.8 132 | # via sphinx 133 | sphinxcontrib-devhelp==1.0.6 134 | # via sphinx 135 | sphinxcontrib-htmlhelp==2.0.5 136 | # via sphinx 137 | sphinxcontrib-jsmath==1.0.1 138 | # via sphinx 139 | sphinxcontrib-log-cabinet==1.0.1 140 | # via -r docs.in 141 | sphinxcontrib-qthelp==1.0.7 142 | # via sphinx 143 | sphinxcontrib-serializinghtml==1.1.10 144 | # via sphinx 145 | tox==4.16.0 146 | # via -r dev.in 147 | urllib3==2.2.2 148 | # via requests 149 | virtualenv==20.26.3 150 | # via 151 | # pre-commit 152 | # tox 153 | werkzeug==3.0.3 154 | # via flask 155 | wheel==0.43.0 156 | # via pip-tools 157 
| 158 | # The following packages are considered to be unsafe in a requirements file: 159 | # pip 160 | # setuptools 161 | -------------------------------------------------------------------------------- /requirements/docs.in: -------------------------------------------------------------------------------- 1 | Pallets-Sphinx-Themes 2 | Sphinx 3 | sphinx-issues 4 | sphinxcontrib-log-cabinet 5 | sphinx-tabs 6 | -------------------------------------------------------------------------------- /requirements/docs.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile docs.in 6 | # 7 | alabaster==0.7.16 8 | # via sphinx 9 | babel==2.15.0 10 | # via sphinx 11 | certifi==2024.7.4 12 | # via requests 13 | charset-normalizer==3.3.2 14 | # via requests 15 | docutils==0.21.2 16 | # via 17 | # sphinx 18 | # sphinx-tabs 19 | idna==3.7 20 | # via requests 21 | imagesize==1.4.1 22 | # via sphinx 23 | jinja2==3.1.4 24 | # via sphinx 25 | markupsafe==2.1.5 26 | # via jinja2 27 | packaging==24.0 28 | # via 29 | # pallets-sphinx-themes 30 | # sphinx 31 | pallets-sphinx-themes==2.1.3 32 | # via -r docs.in 33 | pygments==2.18.0 34 | # via 35 | # sphinx 36 | # sphinx-tabs 37 | requests==2.32.0 38 | # via sphinx 39 | snowballstemmer==2.2.0 40 | # via sphinx 41 | sphinx==8.0.2 42 | # via 43 | # -r docs.in 44 | # pallets-sphinx-themes 45 | # sphinx-issues 46 | # sphinx-tabs 47 | # sphinxcontrib-log-cabinet 48 | sphinx-issues==4.1.0 49 | # via -r docs.in 50 | sphinx-tabs==3.4.5 51 | # via -r docs.in 52 | sphinxcontrib-applehelp==1.0.8 53 | # via sphinx 54 | sphinxcontrib-devhelp==1.0.6 55 | # via sphinx 56 | sphinxcontrib-htmlhelp==2.0.5 57 | # via sphinx 58 | sphinxcontrib-jsmath==1.0.1 59 | # via sphinx 60 | sphinxcontrib-log-cabinet==1.0.1 61 | # via -r docs.in 62 | sphinxcontrib-qthelp==1.0.7 63 | # via sphinx 64 | 
sphinxcontrib-serializinghtml==1.1.10 65 | # via sphinx 66 | urllib3==2.2.2 67 | # via requests 68 | -------------------------------------------------------------------------------- /requirements/tests.in: -------------------------------------------------------------------------------- 1 | pytest 2 | pytest-xprocess 3 | pytest-asyncio 4 | pylibmc 5 | redis 6 | Flask 7 | asgiref 8 | cachelib 9 | -------------------------------------------------------------------------------- /requirements/tests.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile tests.in 6 | # 7 | asgiref==3.8.1 8 | # via -r tests.in 9 | blinker==1.8.2 10 | # via flask 11 | cachelib==0.13.0 12 | # via -r tests.in 13 | click==8.1.7 14 | # via flask 15 | flask==3.0.3 16 | # via -r tests.in 17 | iniconfig==2.0.0 18 | # via pytest 19 | itsdangerous==2.2.0 20 | # via flask 21 | jinja2==3.1.4 22 | # via flask 23 | markupsafe==2.1.5 24 | # via 25 | # jinja2 26 | # werkzeug 27 | packaging==24.0 28 | # via pytest 29 | pluggy==1.5.0 30 | # via pytest 31 | psutil==5.9.8 32 | # via pytest-xprocess 33 | pylibmc==1.6.3 34 | # via -r tests.in 35 | pytest==8.3.2 36 | # via 37 | # -r tests.in 38 | # pytest-asyncio 39 | # pytest-xprocess 40 | pytest-asyncio==0.23.8 41 | # via -r tests.in 42 | pytest-xprocess==1.0.2 43 | # via -r tests.in 44 | redis==5.0.8 45 | # via -r tests.in 46 | werkzeug==3.0.3 47 | # via flask 48 | -------------------------------------------------------------------------------- /requirements/typing.in: -------------------------------------------------------------------------------- 1 | mypy 2 | types-redis 3 | -------------------------------------------------------------------------------- /requirements/typing.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by 
pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile typing.in 6 | # 7 | cffi==1.16.0 8 | # via cryptography 9 | cryptography==42.0.7 10 | # via 11 | # types-pyopenssl 12 | # types-redis 13 | mypy==1.11.1 14 | # via -r typing.in 15 | mypy-extensions==1.0.0 16 | # via mypy 17 | pycparser==2.22 18 | # via cffi 19 | types-cffi==1.16.0.20240331 20 | # via types-pyopenssl 21 | types-pyopenssl==24.1.0.20240425 22 | # via types-redis 23 | types-redis==4.6.0.20240726 24 | # via -r typing.in 25 | types-setuptools==69.5.0.20240519 26 | # via types-cffi 27 | typing-extensions==4.11.0 28 | # via mypy 29 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = Flask-Caching 3 | version = attr: flask_caching.__version__ 4 | url = https://github.com/pallets-eco/flask-caching 5 | project_urls = 6 | Donate = https://palletsprojects.com/donate 7 | Documentation = https://flask-caching.readthedocs.io 8 | Changes = https://flask-caching.readthedocs.io/en/latest/changelog.html 9 | Source Code = https://github.com/pallets-eco/flask-caching 10 | Issue Tracker = https://github.com/pallets-eco/flask-caching/issues 11 | Twitter = https://twitter.com/PalletsTeam 12 | Chat = https://discord.gg/pallets 13 | license = BSD 14 | license_files = LICENSE 15 | author = Peter Justin 16 | author_email = peter.justin@outlook.com 17 | maintainer = Pallets 18 | maintainer_email = contact@palletsprojects.com 19 | description = Adds caching support to Flask applications. 
20 | long_description = file: README.rst 21 | long_description_content_type = text/x-rst 22 | classifiers = 23 | Development Status :: 5 - Production/Stable 24 | Intended Audience :: Developers 25 | License :: OSI Approved :: BSD License 26 | Operating System :: OS Independent 27 | Programming Language :: Python 28 | 29 | [options] 30 | packages = find: 31 | package_dir = = src 32 | include_package_data = true 33 | python_requires = >= 3.8 34 | # Dependencies are in setup.py for GitHub's dependency graph. 35 | 36 | [options.packages.find] 37 | where = src 38 | 39 | [build_sphinx] 40 | source-dir = docs/ 41 | build-dir = docs/_build 42 | all_files = 1 43 | 44 | [upload_sphinx] 45 | upload-dir = docs/_build/html 46 | 47 | [flake8] 48 | # B = bugbear 49 | # E = pycodestyle errors 50 | # F = flake8 pyflakes 51 | # W = pycodestyle warnings 52 | # B9 = bugbear opinions 53 | # ISC = implicit-str-concat 54 | select = B, E, F, W, B9, ISC 55 | ignore = 56 | # slice notation whitespace, invalid 57 | E203 58 | # line length, handled by bugbear B950 59 | E501 60 | # bare except, handled by bugbear B001 61 | E722 62 | # bin op line break, invalid 63 | W503 64 | # explicit strict on zip calls 65 | B905 66 | # up to 88 allowed by bugbear B950 67 | max-line-length = 80 68 | 69 | [coverage:run] 70 | branch = True 71 | omit = 72 | src/flask_caching/contrib/* 73 | source = 74 | flask_caching 75 | tests 76 | 77 | [coverage:paths] 78 | source = 79 | src 80 | */site-packages 81 | 82 | [tool:pytest] 83 | testpaths = tests 84 | 85 | [mypy] 86 | files = src/flask_caching/ 87 | warn_unused_configs = true 88 | 89 | # TODO if all of these can be enabled we can just set 'strict = true' 90 | disallow_subclassing_any = true 91 | # disallow_untyped_calls = true 92 | # disallow_untyped_defs = true 93 | # disallow_incomplete_defs = true 94 | # check_untyped_defs = true 95 | disallow_untyped_decorators = true 96 | no_implicit_optional = true 97 | warn_redundant_casts = true 98 | warn_unused_ignores = 
true 99 | # warn_return_any = true 100 | # no_implicit_reexport = true 101 | strict_equality = true 102 | strict_concatenate = true 103 | 104 | python_version = 3.8 105 | [mypy-uwsgi] 106 | ignore_missing_imports = True 107 | 108 | [mypy-rediscluster] 109 | ignore_missing_imports = True 110 | 111 | [mypy-pylibmc] 112 | ignore_missing_imports = True 113 | 114 | [mypy-google.*] 115 | ignore_missing_imports = True 116 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | # Metadata goes in setup.cfg. These are here for GitHub's dependency graph. 4 | setup( 5 | name="Flask-Caching", 6 | install_requires=["cachelib >= 0.9.0", "Flask"], 7 | ) 8 | -------------------------------------------------------------------------------- /src/flask_caching/backends/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | flask_caching.backends 3 | ~~~~~~~~~~~~~~~~~~~~~~ 4 | 5 | Various caching backends. 6 | 7 | :copyright: (c) 2018 by Peter Justin. 8 | :copyright: (c) 2010 by Thadeus Burgess. 9 | :license: BSD, see LICENSE for more details. 
# Factory callables for the built-in caching backends.  The extension
# resolves the ``CACHE_TYPE`` configuration value to one of these and calls
# it with the Flask app, the cache config dict, and the positional/keyword
# arguments collected for the backend.  Each one simply delegates to the
# backend class's ``factory`` classmethod.


def null(app, config, args, kwargs):
    """Factory for :class:`NullCache` (a cache that never stores anything)."""
    return NullCache.factory(app, config, args, kwargs)


def simple(app, config, args, kwargs):
    """Factory for :class:`SimpleCache`."""
    return SimpleCache.factory(app, config, args, kwargs)


def filesystem(app, config, args, kwargs):
    """Factory for :class:`FileSystemCache`."""
    return FileSystemCache.factory(app, config, args, kwargs)


def redis(app, config, args, kwargs):
    """Factory for :class:`RedisCache`."""
    return RedisCache.factory(app, config, args, kwargs)


def redissentinel(app, config, args, kwargs):
    """Factory for :class:`RedisSentinelCache`."""
    return RedisSentinelCache.factory(app, config, args, kwargs)


def rediscluster(app, config, args, kwargs):
    """Factory for :class:`RedisClusterCache`."""
    return RedisClusterCache.factory(app, config, args, kwargs)


def uwsgi(app, config, args, kwargs):
    """Factory for :class:`UWSGICache`."""
    return UWSGICache.factory(app, config, args, kwargs)


def memcached(app, config, args, kwargs):
    """Factory for :class:`MemcachedCache`."""
    return MemcachedCache.factory(app, config, args, kwargs)


def gaememcached(app, config, args, kwargs):
    # Google App Engine memcached is served by the same client as plain
    # memcached, so this is an alias of :func:`memcached`.
    return memcached(app, config, args, kwargs)
class BaseCache(CachelibBaseCache):
    """Baseclass for the cache systems. All the cache systems implement this
    API or a superset of it.

    :param default_timeout: The default timeout (in seconds) that is used if
                            no timeout is specified on :meth:`set`. A timeout
                            of 0 indicates that the cache never expires.
    """

    def __init__(self, default_timeout=300):
        CachelibBaseCache.__init__(self, default_timeout=default_timeout)
        # When True, delete_many() keeps deleting past individual failures
        # instead of stopping at the first one.  Backends that expose an
        # ``ignore_errors`` option set this in their own __init__.
        self.ignore_errors = False

    @classmethod
    def factory(cls, app, config, args, kwargs):
        """Build a backend instance from the app's cache configuration.

        Subclasses override this to extract their specific settings from
        ``config``; this base implementation ignores all arguments.
        """
        return cls()

    def delete_many(self, *keys):
        """Deletes multiple keys at once.

        :param keys: The function accepts multiple keys as positional
                     arguments.
        :returns: A list containing all successfully deleted keys
        :rtype: list
        """
        deleted_keys = []
        for key in keys:
            if self.delete(key):
                deleted_keys.append(key)
            else:
                # Stop at the first failed deletion unless the backend was
                # configured to ignore errors.
                if not self.ignore_errors:
                    break
        return deleted_keys
38 | :param ignore_errors: If set to ``True`` the :meth:`~BaseCache.delete_many` 39 | method will ignore any errors that occurred during the 40 | deletion process. However, if it is set to ``False`` 41 | it will stop on the first error. Defaults to 42 | ``False``. 43 | """ 44 | 45 | def __init__( 46 | self, 47 | cache_dir, 48 | threshold=500, 49 | default_timeout=300, 50 | mode=0o600, 51 | hash_method=hashlib.md5, 52 | ignore_errors=False, 53 | ): 54 | 55 | BaseCache.__init__(self, default_timeout=default_timeout) 56 | CachelibFileSystemCache.__init__( 57 | self, 58 | cache_dir=cache_dir, 59 | threshold=threshold, 60 | default_timeout=default_timeout, 61 | mode=mode, 62 | hash_method=hash_method, 63 | ) 64 | 65 | self.ignore_errors = ignore_errors 66 | 67 | @classmethod 68 | def factory(cls, app, config, args, kwargs): 69 | args.insert(0, config["CACHE_DIR"]) 70 | kwargs.update( 71 | dict( 72 | threshold=config["CACHE_THRESHOLD"], 73 | ignore_errors=config["CACHE_IGNORE_ERRORS"], 74 | ) 75 | ) 76 | return cls(*args, **kwargs) 77 | -------------------------------------------------------------------------------- /src/flask_caching/backends/memcache.py: -------------------------------------------------------------------------------- 1 | """ 2 | flask_caching.backends.memcache 3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 4 | 5 | The memcache caching backend. 6 | 7 | :copyright: (c) 2018 by Peter Justin. 8 | :copyright: (c) 2010 by Thadeus Burgess. 9 | :license: BSD, see LICENSE for more details. 10 | """ 11 | 12 | import pickle 13 | import re 14 | 15 | from cachelib import MemcachedCache as CachelibMemcachedCache 16 | 17 | from flask_caching.backends.base import BaseCache 18 | 19 | 20 | _test_memcached_key = re.compile(r"[^\x00-\x21\xff]{1,250}$").match 21 | 22 | 23 | class MemcachedCache(BaseCache, CachelibMemcachedCache): 24 | """A cache that uses memcached as backend. 
25 | 26 | The first argument can either be an object that resembles the API of a 27 | :class:`memcache.Client` or a tuple/list of server addresses. In the 28 | event that a tuple/list is passed, Werkzeug tries to import the best 29 | available memcache library. 30 | 31 | This cache looks into the following packages/modules to find bindings for 32 | memcached: 33 | 34 | - ``pylibmc`` 35 | - ``google.appengine.api.memcached`` 36 | - ``memcached`` 37 | - ``libmc`` 38 | 39 | Implementation notes: This cache backend works around some limitations in 40 | memcached to simplify the interface. For example unicode keys are encoded 41 | to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return 42 | the keys in the same format as passed. Furthermore all get methods 43 | silently ignore key errors to not cause problems when untrusted user data 44 | is passed to the get methods which is often the case in web applications. 45 | 46 | :param servers: a list or tuple of server addresses or alternatively 47 | a :class:`memcache.Client` or a compatible client. 48 | :param default_timeout: the default timeout that is used if no timeout is 49 | specified on :meth:`~BaseCache.set`. A timeout of 50 | 0 indicates that the cache never expires. 51 | :param key_prefix: a prefix that is added before all keys. This makes it 52 | possible to use the same memcached server for different 53 | applications. Keep in mind that 54 | :meth:`~BaseCache.clear` will also clear keys with a 55 | different prefix. 
class SpreadSASLMemcachedCache(SASLMemcachedCache):
    """Simple subclass of the SASL memcached client that will spread the
    value across multiple keys if it is bigger than a given threshold.

    Spreading requires using pickle to store the value, which can
    significantly impact the performance.
    """

    def __init__(self, *args, **kwargs):
        """
        Kwargs:
            chunksize (int): max length of a pickled object that can fit in
                memcached (memcache has an upper limit of 1MB for values,
                default: 1048448)
            maxchunk (int): max number of chunks a single value may be
                spread across (default: 32)
        """
        # Pop (not get) the spreading options so they are not forwarded to
        # pylibmc.Client via SASLMemcachedCache.__init__, which would fail
        # on unexpected keyword arguments.
        self.chunksize = kwargs.pop("chunksize", 1048448)
        self.maxchunk = kwargs.pop("maxchunk", 32)
        super().__init__(*args, **kwargs)

    @classmethod
    def factory(cls, app, config, args, kwargs):
        args.append(config["CACHE_MEMCACHED_SERVERS"])
        kwargs.update(
            dict(
                username=config.get("CACHE_MEMCACHED_USERNAME"),
                password=config.get("CACHE_MEMCACHED_PASSWORD"),
                key_prefix=config.get("CACHE_KEY_PREFIX"),
            )
        )

        return cls(*args, **kwargs)

    def delete(self, key):
        # Delete every chunk key the value could have been spread across.
        for skey in self._genkeys(key):
            super().delete(skey)

    def set(self, key, value, timeout=None, chunk=True):
        """Set a value in cache, potentially spreading it across multiple key.

        :param key: The cache key.
        :param value: The value to cache.
        :param timeout: The timeout after which the cache will be invalidated.
        :param chunk: If set to `False`, then spreading across multiple keys
                      is disabled. This can be faster, but it will fail if
                      the value is bigger than the chunks. It requires you
                      to get back the object by specifying that it is not
                      spread.
        """
        if chunk:
            return self._set(key, value, timeout=timeout)
        else:
            return super().set(key, value, timeout=timeout)

    def _set(self, key, value, timeout=None):
        # Pickling/unpickling adds overhead, but we cannot know the
        # serialized length (and therefore whether chunking is needed at
        # all) without serializing first, so we always pickle.
        serialized = pickle.dumps(value, 2)
        values = {}
        len_ser = len(serialized)
        chks = range(0, len_ser, self.chunksize)

        if len(chks) > self.maxchunk:
            # The serialized value would not fit in ``maxchunk`` chunks.
            raise ValueError(
                "Cannot store value: it would require more than %s chunks"
                % self.maxchunk
            )

        for i in chks:
            values[f"{key}.{i // self.chunksize}"] = serialized[i : i + self.chunksize]

        super().set_many(values, timeout)

    def get(self, key, chunk=True):
        """Get a cached value.

        :param chunk: If set to ``False``, the value is fetched as a single
                      (non-spread) key, matching ``set(..., chunk=False)``.
        """
        if chunk:
            return self._get(key)
        else:
            return super().get(key)

    def _genkeys(self, key):
        # All chunk keys a value for ``key`` could possibly occupy.
        return [f"{key}.{i}" for i in range(self.maxchunk)]

    def _get(self, key):
        to_get = [f"{key}.{i}" for i in range(self.maxchunk)]
        result = super().get_many(*to_get)
        # Missing chunk keys come back as None; concatenate the rest.
        serialized = b"".join(v for v in result if v is not None)

        if not serialized:
            return None

        # NOTE: unpickling cached data is only safe because the cache is
        # written exclusively by this application.
        return pickle.loads(serialized)
:license: BSD, see LICENSE for more details.
"""

from flask_caching.backends.base import BaseCache


class NullCache(BaseCache):
    """A cache that doesn't cache. This can be useful for unit testing.

    :param default_timeout: a dummy parameter that is ignored but exists
                            for API compatibility with other caches.
    """

    def has(self, key):
        # Nothing is ever stored, so every lookup is a miss.
        return False
--------------------------------------------------------------------------------
/src/flask_caching/backends/rediscache.py:
--------------------------------------------------------------------------------
"""
flask_caching.backends.rediscache
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The redis caching backend.

:copyright: (c) 2018 by Peter Justin.
:copyright: (c) 2010 by Thadeus Burgess.
:license: BSD, see LICENSE for more details.
"""

import pickle

from cachelib import RedisCache as CachelibRedisCache

from flask_caching.backends.base import BaseCache


class RedisCache(BaseCache, CachelibRedisCache):
    """Uses the Redis key-value store as a cache backend.

    The first argument can be either a string denoting address of the Redis
    server or an object resembling an instance of a redis.Redis class.

    Note: Python Redis API already takes care of encoding unicode strings on
    the fly.

    :param host: address of the Redis server or an object which API is
                 compatible with the official Python Redis client (redis-py).
    :param port: port number on which Redis server listens for connections.
    :param password: password authentication for the Redis server.
    :param db: db (zero-based numeric index) on Redis Server to connect.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified on :meth:`~BaseCache.set`. A timeout of
                            0 indicates that the cache never expires.
    :param key_prefix: A prefix that should be added to all keys.

    Any additional keyword arguments will be passed to ``redis.Redis``.
    """

    def __init__(
        self,
        host="localhost",
        port=6379,
        password=None,
        db=0,
        default_timeout=300,
        key_prefix=None,
        **kwargs
    ):
        # Initialise both bases explicitly: BaseCache keeps flask-caching
        # state, CachelibRedisCache builds and owns the redis client(s).
        BaseCache.__init__(self, default_timeout=default_timeout)
        CachelibRedisCache.__init__(
            self,
            host=host,
            port=port,
            password=password,
            db=db,
            default_timeout=default_timeout,
            key_prefix=key_prefix,
            **kwargs
        )

    @classmethod
    def factory(cls, app, config, args, kwargs):
        try:
            from redis import from_url as redis_from_url
        except ImportError as e:
            raise RuntimeError("no redis module found") from e

        kwargs.update(
            dict(
                host=config.get("CACHE_REDIS_HOST", "localhost"),
                port=config.get("CACHE_REDIS_PORT", 6379),
                db=config.get("CACHE_REDIS_DB", 0),
            )
        )
        password = config.get("CACHE_REDIS_PASSWORD")
        if password:
            kwargs["password"] = password

        key_prefix = config.get("CACHE_KEY_PREFIX")
        if key_prefix:
            kwargs["key_prefix"] = key_prefix

        redis_url = config.get("CACHE_REDIS_URL")
        if redis_url:
            # A URL overrides host/port: the connected client object itself
            # is passed as ``host``, which cachelib's RedisCache accepts.
            kwargs["host"] = redis_from_url(redis_url, db=kwargs.pop("db", None))

        new_class = cls(*args, **kwargs)

        return new_class

    def dump_object(self, value):
        """Dumps an object into a string for redis. By default it serializes
        integers as regular string and pickle dumps everything else.
        """
        t = type(value)
        if t == int:
            return str(value).encode("ascii")
        # The leading "!" marker distinguishes pickled payloads from the
        # plain-integer encoding above when loading values back.
        return b"!" + pickle.dumps(value)

    def unlink(self, *keys):
        """when redis-py >= 3.0.0 and redis > 4, support this operation"""
        if not keys:
            return
        if self.key_prefix:
            keys = [self.key_prefix + key for key in keys]

        # UNLINK reclaims memory asynchronously on the server; fall back to
        # DELETE when the installed client/server does not provide it.
        unlink = getattr(self._write_client, "unlink", None)
        if unlink is not None and callable(unlink):
            return self._write_client.unlink(*keys)
        return self._write_client.delete(*keys)


class RedisSentinelCache(RedisCache):
    """Uses the Redis key-value store as a cache backend.

    The first argument can be either a string denoting address of the Redis
    server or an object resembling an instance of a redis.Redis class.

    Note: Python Redis API already takes care of encoding unicode strings on
    the fly.


    :param sentinels: A list or a tuple of Redis sentinel addresses.
    :param master: The name of the master server in a sentinel configuration.
    :param password: password authentication for the Redis server.
    :param db: db (zero-based numeric index) on Redis Server to connect.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified on :meth:`~BaseCache.set`. A timeout of
                            0 indicates that the cache never expires.
    :param key_prefix: A prefix that should be added to all keys.

    Any additional keyword arguments will be passed to
    ``redis.sentinel.Sentinel``.
    """

    def __init__(
        self,
        sentinels=None,
        master=None,
        password=None,
        db=0,
        default_timeout=300,
        key_prefix="",
        **kwargs
    ):
        super().__init__(key_prefix=key_prefix, default_timeout=default_timeout)

        try:
            import redis.sentinel
        except ImportError as e:
            raise RuntimeError("no redis module found") from e

        if kwargs.get("decode_responses", None):
            raise ValueError("decode_responses is not supported by RedisCache.")

        sentinels = sentinels or [("127.0.0.1", 26379)]
        # Kwargs prefixed with "sentinel_" are intended for the Sentinel
        # manager itself: strip the 9-character prefix (len("sentinel_"))
        # and keep the remaining kwargs for the pooled Redis connections.
        sentinel_kwargs = {
            key[9:]: value
            for key, value in kwargs.items()
            if key.startswith("sentinel_")
        }
        kwargs = {
            key: value
            for key, value in kwargs.items()
            if not key.startswith("sentinel_")
        }

        sentinel = redis.sentinel.Sentinel(
            sentinels=sentinels,
            password=password,
            db=db,
            sentinel_kwargs=sentinel_kwargs,
            **kwargs
        )

        # Writes always go to the elected master; reads may use a replica.
        self._write_client = sentinel.master_for(master)
        self._read_client = sentinel.slave_for(master)

    @classmethod
    def factory(cls, app, config, args, kwargs):
        kwargs.update(
            dict(
                sentinels=config.get("CACHE_REDIS_SENTINELS", [("127.0.0.1", 26379)]),
                master=config.get("CACHE_REDIS_SENTINEL_MASTER", "mymaster"),
                password=config.get("CACHE_REDIS_PASSWORD", None),
                sentinel_password=config.get("CACHE_REDIS_SENTINEL_PASSWORD", None),
                key_prefix=config.get("CACHE_KEY_PREFIX", None),
                db=config.get("CACHE_REDIS_DB", 0),
            )
        )

        return cls(*args, **kwargs)


class RedisClusterCache(RedisCache):
    """Uses the Redis key-value store as a cache backend.

    The first argument can be either a string denoting address of the Redis
    server or an object resembling an instance of a rediscluster.RedisCluster
    class.

    Note: Python Redis API already takes care of encoding unicode strings on
    the fly.


    :param cluster: The redis cluster nodes address separated by comma.
                    e.g. host1:port1,host2:port2,host3:port3 .
    :param password: password authentication for the Redis server.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified on :meth:`~BaseCache.set`. A timeout of
                            0 indicates that the cache never expires.
    :param key_prefix: A prefix that should be added to all keys.

    Any additional keyword arguments will be passed to
    ``rediscluster.RedisCluster``.
    """

    def __init__(
        self, cluster="", password="", default_timeout=300, key_prefix="", **kwargs
    ):
        super().__init__(key_prefix=key_prefix, default_timeout=default_timeout)

        if kwargs.get("decode_responses", None):
            raise ValueError("decode_responses is not supported by RedisCache.")

        try:
            from redis import RedisCluster
            from redis.cluster import ClusterNode
        except ImportError as e:
            raise RuntimeError("no redis.cluster module found") from e

        try:
            # "host1:port1,host2:port2" -> [ClusterNode("host1", "port1"), ...]
            # A node without a ":" produces a 1-element split and the
            # node[1] access below raises IndexError -> ValueError.
            nodes = [(node.split(":")) for node in cluster.split(",")]
            startup_nodes = [
                ClusterNode(node[0].strip(), node[1].strip()) for node in nodes
            ]
        except IndexError as e:
            raise ValueError(
                "Please give the correct cluster argument "
                "e.g. host1:port1,host2:port2,host3:port3"
            ) from e

        # Skips the check of cluster-require-full-coverage config,
        # useful for clusters without the CONFIG command (like aws)
        skip_full_coverage_check = kwargs.pop("skip_full_coverage_check", True)

        cluster = RedisCluster(
            startup_nodes=startup_nodes,
            password=password,
            skip_full_coverage_check=skip_full_coverage_check,
            **kwargs
        )

        # Cluster clients handle routing themselves; one client serves both
        # read and write paths.
        self._write_client = cluster
        self._read_client = cluster

    @classmethod
    def factory(cls, app, config, args, kwargs):
        kwargs.update(
            dict(
                cluster=config.get("CACHE_REDIS_CLUSTER", ""),
                password=config.get("CACHE_REDIS_PASSWORD", ""),
                default_timeout=config.get("CACHE_DEFAULT_TIMEOUT", 300),
                key_prefix=config.get("CACHE_KEY_PREFIX", ""),
            )
        )
        return cls(*args, **kwargs)
--------------------------------------------------------------------------------
/src/flask_caching/backends/simplecache.py:
--------------------------------------------------------------------------------
"""
flask_caching.backends.simple
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The simple cache backend.

:copyright: (c) 2018 by Peter Justin.
:copyright: (c) 2010 by Thadeus Burgess.
:license: BSD, see LICENSE for more details.
"""

import logging

from cachelib import SimpleCache as CachelibSimpleCache

from flask_caching.backends.base import BaseCache


logger = logging.getLogger(__name__)


class SimpleCache(BaseCache, CachelibSimpleCache):
    """Simple memory cache for single process environments. This class exists
    mainly for the development server and is not 100% thread safe. It tries
    to use as many atomic operations as possible and no locks for simplicity
    but it could happen under heavy load that keys are added multiple times.
27 | 28 | :param threshold: the maximum number of items the cache stores before 29 | it starts deleting some. 30 | :param default_timeout: the default timeout that is used if no timeout is 31 | specified on :meth:`~BaseCache.set`. A timeout of 32 | 0 indicates that the cache never expires. 33 | :param ignore_errors: If set to ``True`` the :meth:`~BaseCache.delete_many` 34 | method will ignore any errors that occurred during 35 | the deletion process. However, if it is set to 36 | ``False`` it will stop on the first error. Defaults 37 | to ``False``. 38 | """ 39 | 40 | def __init__(self, threshold=500, default_timeout=300, ignore_errors=False): 41 | BaseCache.__init__(self, default_timeout=default_timeout) 42 | CachelibSimpleCache.__init__( 43 | self, threshold=threshold, default_timeout=default_timeout 44 | ) 45 | 46 | self.ignore_errors = ignore_errors 47 | 48 | @classmethod 49 | def factory(cls, app, config, args, kwargs): 50 | kwargs.update( 51 | dict( 52 | threshold=config["CACHE_THRESHOLD"], 53 | ignore_errors=config["CACHE_IGNORE_ERRORS"], 54 | ) 55 | ) 56 | return cls(*args, **kwargs) 57 | -------------------------------------------------------------------------------- /src/flask_caching/backends/uwsgicache.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | from flask_caching.contrib.uwsgicache import UWSGICache as _UWSGICache 4 | 5 | 6 | class UWSGICache(_UWSGICache): 7 | def __init__(self, *args, **kwargs): 8 | warnings.warn( 9 | "Importing UWSGICache from flask_caching.backends is deprecated, " 10 | "use flask_caching.contrib.uwsgicache.UWSGICache instead", 11 | category=DeprecationWarning, 12 | stacklevel=2, 13 | ) 14 | 15 | super().__init__(*args, **kwargs) 16 | -------------------------------------------------------------------------------- /src/flask_caching/contrib/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pallets-eco/flask-caching/e59bc040cd47cd2b43e501d636d43d442c50b3ff/src/flask_caching/contrib/__init__.py -------------------------------------------------------------------------------- /src/flask_caching/contrib/googlecloudstoragecache.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import logging 4 | 5 | from flask_caching.backends.base import BaseCache 6 | 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | try: 12 | from google.auth.credentials import AnonymousCredentials 13 | from google.cloud import storage, exceptions 14 | except ImportError as e: 15 | raise RuntimeError("no google-cloud-storage module found") from e 16 | 17 | 18 | class GoogleCloudStorageCache(BaseCache): 19 | """Uses an Google Cloud Storage bucket as a cache backend. 20 | Note: User-contributed functionality. This project does not guarantee that 21 | this functionality will be maintained or functional at any given time. 22 | Note: Cache keys must meet GCS criteria for a valid object name (a sequence 23 | of Unicode characters whose UTF-8 encoding is at most 1024 bytes long). 24 | Note: Expired cache objects are not automatically purged. If 25 | delete_expired_objects_on_read=True, they will be deleted following an 26 | attempted read (which reduces performance). Otherwise, you have to delete 27 | stale objects yourself. Consider an GCS bucket lifecycle rule or other 28 | out-of-band process. For example you can use the following rule. 29 | {"rule": [{"action": {"type": "Delete"}, "condition": {"daysSinceCustomTime": 0}}]} 30 | https://cloud.google.com/storage/docs/lifecycle#dayssincecustomtime 31 | :param bucket: Required. Name of the bucket to use. It must already exist. 32 | :param key_prefix: A prefix that should be added to all keys. 33 | :param default_timeout: the default timeout that is used if no timeout is 34 | specified on :meth:`~BaseCache.set`. 
A timeout of 35 | 0 indicates that the cache never expires. 36 | :param delete_expired_objects_on_read: If True, if a read finds a stale 37 | object, it will be deleted before 38 | a response is returned. Will slow 39 | down responses. 40 | :param anonymous: If true, use anonymous credentials. Useful for testing. 41 | Any additional keyword arguments will be passed to ``google.cloud.storage.Client``. 42 | """ 43 | 44 | def __init__( 45 | self, 46 | bucket, 47 | key_prefix=None, 48 | default_timeout=300, 49 | delete_expired_objects_on_read=False, 50 | anonymous=False, 51 | **kwargs 52 | ): 53 | super().__init__(default_timeout) 54 | if not isinstance(bucket, str): 55 | raise ValueError("GCSCache bucket parameter must be a string") 56 | if anonymous: 57 | self._client = storage.Client( 58 | credentials=AnonymousCredentials(), project="test", **kwargs 59 | ) 60 | else: 61 | self._client = storage.Client(**kwargs) 62 | self.bucket = self._client.get_bucket(bucket) 63 | self.key_prefix = key_prefix or "" 64 | self.default_timeout = default_timeout 65 | self.delete_expired_objects_on_read = delete_expired_objects_on_read 66 | 67 | @classmethod 68 | def factory(cls, app, config, args, kwargs): 69 | args.insert(0, config["CACHE_GCS_BUCKET"]) 70 | key_prefix = config.get("CACHE_KEY_PREFIX") 71 | if key_prefix: 72 | kwargs["key_prefix"] = key_prefix 73 | return cls(*args, **kwargs) 74 | 75 | def get(self, key): 76 | result = None 77 | expired = False 78 | hit_or_miss = "miss" 79 | full_key = self.key_prefix + key 80 | blob = self.bucket.get_blob(full_key) 81 | if blob is not None: 82 | expired = blob.custom_time and self._now() > blob.custom_time 83 | if expired: 84 | # Object is stale 85 | if self.delete_expired_objects_on_read: 86 | self._delete(full_key) 87 | else: 88 | try: 89 | result = blob.download_as_bytes() 90 | hit_or_miss = "hit" 91 | if blob.content_type == "application/json": 92 | result = json.loads(result) 93 | except exceptions.NotFound: 94 | pass 95 | 
expiredstr = "(expired)" if expired else "" 96 | logger.debug("get key %r -> %s %s", full_key, hit_or_miss, expiredstr) 97 | return result 98 | 99 | def set(self, key, value, timeout=None): 100 | result = False 101 | full_key = self.key_prefix + key 102 | content_type = "application/json" 103 | try: 104 | value = json.dumps(value) 105 | except (UnicodeDecodeError, TypeError): 106 | content_type = "application/octet-stream" 107 | blob = self.bucket.blob(full_key) 108 | if timeout is None: 109 | timeout = self.default_timeout 110 | if timeout != 0: 111 | # Use 'Custom-Time' for expiry 112 | # https://cloud.google.com/storage/docs/metadata#custom-time 113 | blob.custom_time = self._now(delta=timeout) 114 | try: 115 | blob.upload_from_string(value, content_type=content_type) 116 | result = True 117 | except exceptions.TooManyRequests: 118 | pass 119 | logger.debug("set key %r -> %s", full_key, result) 120 | return result 121 | 122 | def add(self, key, value, timeout=None): 123 | full_key = self.key_prefix + key 124 | if self._has(full_key): 125 | logger.debug("add key %r -> not added", full_key) 126 | return False 127 | else: 128 | return self.set(key, value, timeout) 129 | 130 | def delete(self, key): 131 | full_key = self.key_prefix + key 132 | return self._delete(full_key) 133 | 134 | def delete_many(self, *keys): 135 | return self._delete_many(self.key_prefix + key for key in keys) 136 | 137 | def has(self, key): 138 | full_key = self.key_prefix + key 139 | return self._has(full_key) 140 | 141 | def clear(self): 142 | return self._prune(clear_all=True) 143 | 144 | def _prune(self, clear_all=False): 145 | # Delete in batches of 100 which is much faster than individual deletes 146 | nremoved = 0 147 | now = self._now() 148 | response_iterator = self._client.list_blobs( 149 | self.bucket, 150 | prefix=self.key_prefix, 151 | fields="items(name,customTime),nextPageToken", 152 | ) 153 | to_delete = [] 154 | for blob in response_iterator: 155 | if clear_all or 
blob.custom_time and blob.custom_time < now: 156 | to_delete.append(blob.name) 157 | nremoved += 1 158 | if len(to_delete) == 100: 159 | self._delete_many(to_delete) 160 | to_delete = [] 161 | # Delete the remainder 162 | if to_delete: 163 | self._delete_many(to_delete) 164 | logger.debug("evicted %d key(s)", nremoved) 165 | return True 166 | 167 | def _delete(self, key): 168 | return self._delete_many([key]) 169 | 170 | def _delete_many(self, keys): 171 | try: 172 | with self._client.batch(): 173 | for key in keys: 174 | self.bucket.delete_blob(key) 175 | except (exceptions.NotFound, exceptions.TooManyRequests): 176 | pass 177 | return True 178 | 179 | def _has(self, key): 180 | result = False 181 | expired = False 182 | blob = self.bucket.get_blob(key) 183 | if blob is not None: 184 | expired = blob.custom_time and self._now() > blob.custom_time 185 | if expired: 186 | # Exists but is stale 187 | if self.delete_expired_objects_on_read: 188 | self._delete(key) 189 | else: 190 | result = True 191 | expiredstr = "(expired)" if expired else "" 192 | logger.debug("has key %r -> %s %s", key, result, expiredstr) 193 | return result 194 | 195 | def _now(self, delta=0): 196 | return datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( 197 | seconds=delta 198 | ) 199 | -------------------------------------------------------------------------------- /src/flask_caching/contrib/uwsgicache.py: -------------------------------------------------------------------------------- 1 | """ 2 | flask_caching.backends.uwsgicache 3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 4 | 5 | The uWSGI caching backend. 6 | 7 | :copyright: (c) 2018 by Peter Justin. 8 | :copyright: (c) 2010 by Thadeus Burgess. 9 | :license: BSD, see LICENSE for more details. 
"""

from cachelib import UWSGICache as CachelibUWSGICache

from flask_caching.backends.base import BaseCache


class UWSGICache(BaseCache, CachelibUWSGICache):
    """Implements the cache using uWSGI's caching framework.

    .. note::
        This class cannot be used when running under PyPy, because the uWSGI
        API implementation for PyPy is lacking the needed functionality.

    :param default_timeout: The default timeout in seconds.
    :param cache: The name of the caching instance to connect to, for
        example: mycache@localhost:3031, defaults to an empty string, which
        means uWSGI will use the first cache instance initialized.
        If the cache is in the same instance as the werkzeug app,
        you only have to provide the name of the cache.
    """

    def __init__(self, default_timeout=300, cache=""):
        BaseCache.__init__(self, default_timeout=default_timeout)
        CachelibUWSGICache.__init__(
            self,
            cache=cache,
            default_timeout=default_timeout,
        )

        # Import lazily: the `uwsgi` module only exists when the process is
        # actually running under uWSGI.
        try:
            import uwsgi

            self._uwsgi = uwsgi
        except ImportError as e:
            raise RuntimeError(
                "uWSGI could not be imported, are you running under uWSGI?"
            ) from e

        if "cache2" not in uwsgi.opt:
            raise RuntimeError(
                "You must enable cache2 in uWSGI configuration: "
                "https://uwsgi-docs.readthedocs.io/en/latest/Caching.html"
            )

    @classmethod
    def factory(cls, app, config, args, kwargs):
        # The name of the caching instance to connect to, for
        # example: mycache@localhost:3031, defaults to an empty string, which
        # means uWSGI will cache in the local instance. If the cache is in the
        # same instance as the werkzeug app, you only have to provide the name
        # of the cache.
        uwsgi_cache_name = config.get("CACHE_UWSGI_NAME", "")
        kwargs.update(dict(cache=uwsgi_cache_name))
        return cls(*args, **kwargs)
--------------------------------------------------------------------------------
/src/flask_caching/jinja2ext.py:
--------------------------------------------------------------------------------
"""
flask_caching.jinja2ext
~~~~~~~~~~~~~~~~~~~~~~~

Jinja2 extension that adds support for caching template fragments.

Usage::

    {% cache timeout key1[, [key2, ...]] %}
    ...
    {% endcache %}

By default, the value of "path to template file" + "block start line"
is used as the cache key. Also, the key name can be set manually.
Keys are concatenated together into a single string, that can be used
to avoid the same block evaluating in different templates.

Set the timeout to ``None`` for no timeout, but with custom keys::

    {% cache None "key" %}
    ...
    {% endcache %}

Set timeout to ``del`` to delete cached value::

    {% cache 'del' key1 %}
    ...
    {% endcache %}

Considering we have ``render_form_field`` and ``render_submit`` macros::

    {% cache 60*5 'myform' %}
34 |
35 | {% render_form_field(form.username) %} 36 | {% render_submit() %} 37 |
38 |
39 | {% endcache %} 40 | 41 | :copyright: (c) 2010 by Thadeus Burgess. 42 | :license: BSD, see LICENSE for more details. 43 | """ 44 | 45 | from jinja2 import nodes 46 | from jinja2.ext import Extension 47 | 48 | from flask_caching import make_template_fragment_key 49 | 50 | JINJA_CACHE_ATTR_NAME = "_template_fragment_cache" 51 | 52 | 53 | class CacheExtension(Extension): 54 | tags = {"cache"} 55 | 56 | def parse(self, parser): 57 | lineno = next(parser.stream).lineno 58 | 59 | #: Parse timeout 60 | args = [parser.parse_expression()] 61 | 62 | #: Parse fragment name 63 | #: Grab the fragment name if it exists 64 | #: otherwise, default to the old method of using the templates 65 | #: lineno to maintain backwards compatibility. 66 | if parser.stream.skip_if("comma"): 67 | args.append(parser.parse_expression()) 68 | else: 69 | args.append(nodes.Const(f"{parser.filename}{lineno}")) 70 | 71 | #: Parse vary_on parameters 72 | vary_on = [] 73 | while parser.stream.skip_if("comma"): 74 | vary_on.append(parser.parse_expression()) 75 | 76 | if vary_on: 77 | args.append(nodes.List(vary_on)) 78 | else: 79 | args.append(nodes.Const([])) 80 | 81 | body = parser.parse_statements(["name:endcache"], drop_needle=True) 82 | return nodes.CallBlock( 83 | self.call_method("_cache", args), [], [], body 84 | ).set_lineno(lineno) 85 | 86 | def _cache(self, timeout, fragment_name, vary_on, caller): 87 | try: 88 | cache = getattr(self.environment, JINJA_CACHE_ATTR_NAME) 89 | except AttributeError as e: 90 | raise e 91 | 92 | key = make_template_fragment_key(fragment_name, vary_on=vary_on) 93 | 94 | #: Delete key if timeout is 'del' 95 | if timeout == "del": 96 | cache.delete(key) 97 | return caller() 98 | 99 | rv = cache.get(key) 100 | if rv is None: 101 | rv = caller() 102 | cache.set(key, rv, timeout) 103 | return rv 104 | -------------------------------------------------------------------------------- /src/flask_caching/py.typed: 
# ------------------------------------------------------------------------------
# https://raw.githubusercontent.com/pallets-eco/flask-caching/e59bc040cd47cd2b43e501d636d43d442c50b3ff/src/flask_caching/py.typed
# (py.typed is an empty PEP 561 marker file)
# ------------------------------------------------------------------------------
# /src/flask_caching/utils.py:
# ------------------------------------------------------------------------------
import inspect
import string
from typing import Callable
from typing import List
from typing import Optional

TEMPLATE_FRAGMENT_KEY_TEMPLATE = "_template_fragment_cache_%s%s"
# Used to remove control characters and whitespace from cache keys.
valid_chars = set(string.ascii_letters + string.digits + "_.")
del_chars = "".join(c for c in map(chr, range(256)) if c not in valid_chars)
# str.translate() looks characters up by *ordinal*; a plain {char: None}
# dict never matches anything, which silently made key sanitisation a
# no-op. str.maketrans builds the correctly-keyed table.
null_control = (str.maketrans("", "", del_chars),)


def wants_args(f: Callable) -> bool:
    """Check if the function wants any arguments"""
    arg_spec = inspect.getfullargspec(f)
    return bool(arg_spec.args or arg_spec.varargs or arg_spec.varkw)


def get_function_parameters(f: Callable) -> List[inspect.Parameter]:
    """Get function parameters
    :param f: the function to inspect
    :return: Parameter list of function
    """
    return list(inspect.signature(f).parameters.values())


def get_arg_names(f: Callable) -> List[str]:
    """Return the positional-or-keyword argument names of a function.
    :param f: the function to inspect
    :return: String list of arguments
    """
    return [
        parameter.name
        for parameter in get_function_parameters(f)
        if parameter.kind == parameter.POSITIONAL_OR_KEYWORD
    ]


def get_arg_default(f: Callable, position: int):
    """Return the default of the argument at ``position``, or None."""
    arg = get_function_parameters(f)[position]
    arg_def = arg.default
    return arg_def if arg_def != inspect.Parameter.empty else None


def get_id(obj):
    # Objects may opt into a custom cache identity via __caching_id__;
    # otherwise repr() is used.
    return getattr(obj, "__caching_id__", repr)(obj)


def function_namespace(f, args=None):
    """Attempts to returns unique namespace for function"""
    m_args = get_arg_names(f)

    instance_token = None

    instance_self = getattr(f, "__self__", None)

    if instance_self and not inspect.isclass(instance_self):
        instance_token = get_id(f.__self__)
    elif m_args and m_args[0] == "self" and args:
        instance_token = get_id(args[0])

    module = f.__module__

    # Guard `args` before indexing: previously `args[0]` raised a bare
    # TypeError when no args were supplied for a classmethod, instead of
    # the intended, explanatory ValueError.
    if m_args and m_args[0] == "cls" and (not args or not inspect.isclass(args[0])):
        raise ValueError(
            "When using `delete_memoized` on a "
            "`@classmethod` you must provide the "
            "class as the first argument"
        )

    if hasattr(f, "__qualname__"):
        name = f.__qualname__
    else:
        # Pre-3.3 style fallback: reconstruct a qualified name from the
        # bound instance / im_class / call arguments.
        klass = getattr(f, "__self__", None)

        if klass and not inspect.isclass(klass):
            klass = klass.__class__

        if not klass:
            klass = getattr(f, "im_class", None)

        if not klass:
            if m_args and args:
                if m_args[0] == "self":
                    klass = args[0].__class__
                elif m_args[0] == "cls":
                    klass = args[0]

        if klass:
            name = klass.__name__ + "." + f.__name__
        else:
            name = f.__name__

    # Strip characters outside [A-Za-z0-9_.] so the namespace is a safe key.
    ns = ".".join((module, name)).translate(*null_control)

    ins = (
        ".".join((module, name, instance_token)).translate(*null_control)
        if instance_token
        else None
    )

    return ns, ins
+ f.__name__ 92 | else: 93 | name = f.__name__ 94 | 95 | ns = ".".join((module, name)).translate(*null_control) 96 | 97 | ins = ( 98 | ".".join((module, name, instance_token)).translate(*null_control) 99 | if instance_token 100 | else None 101 | ) 102 | 103 | return ns, ins 104 | 105 | 106 | def make_template_fragment_key( 107 | fragment_name: str, vary_on: Optional[List[str]] = None 108 | ) -> str: 109 | """Make a cache key for a specific fragment name.""" 110 | if vary_on: 111 | fragment_name = "%s_" % fragment_name 112 | else: 113 | vary_on = [] 114 | return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, "_".join(vary_on)) 115 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import os 3 | 4 | import flask 5 | import pytest 6 | 7 | import flask_caching as fsc 8 | 9 | try: 10 | __import__("pytest_xprocess") 11 | from xprocess import ProcessStarter 12 | except ImportError: 13 | 14 | @pytest.fixture(scope="session") 15 | def xprocess(): 16 | pytest.skip("pytest-xprocess not installed.") 17 | 18 | 19 | @pytest.fixture 20 | def app(request): 21 | app = flask.Flask( 22 | request.module.__name__, template_folder=os.path.dirname(__file__) 23 | ) 24 | app.testing = True 25 | app.config["CACHE_TYPE"] = "simple" 26 | return app 27 | 28 | 29 | @pytest.fixture 30 | def cache(app): 31 | return fsc.Cache(app) 32 | 33 | 34 | @pytest.fixture( 35 | params=[method for method in fsc.SUPPORTED_HASH_FUNCTIONS], 36 | ids=[method.__name__ for method in fsc.SUPPORTED_HASH_FUNCTIONS], 37 | ) 38 | def hash_method(request): 39 | return request.param 40 | 41 | 42 | @pytest.fixture(scope="class") 43 | def redis_server(xprocess): 44 | try: 45 | import redis # noqa 46 | except ImportError: 47 | pytest.skip("Python package 'redis' is not installed.") 48 | 49 | class Starter(ProcessStarter): 50 | pattern = "[Rr]eady to accept connections" 51 
@pytest.fixture(scope="class")
def memcache_server(xprocess):
    """Start a memcached process once per test class via pytest-xprocess.

    Skips the dependent tests when neither a memcache client library nor the
    memcached binary is available; terminates the server after the class.
    """
    # Probe the known client libraries in order of preference; only their
    # availability matters here, the import itself is unused.
    try:
        import pylibmc as memcache
    except ImportError:
        try:
            from google.appengine.api import memcache
        except ImportError:
            try:
                import memcache  # noqa
            except ImportError:
                # BUG fix: the message previously read "pylibmc'," with a
                # missing opening quote.
                pytest.skip(
                    "Python package for memcache is not installed. Need one of "
                    "'pylibmc', 'google.appengine', or 'memcache'."
                )

    class Starter(ProcessStarter):
        # Empty pattern: consider the process ready as soon as it starts.
        pattern = ""
        args = ["memcached", "-vv"]

    try:
        xprocess.ensure("memcached", Starter)
    except OSError as e:
        # xprocess raises FileNotFoundError
        if e.errno == errno.ENOENT:
            pytest.skip("Memcached is not installed.")
        else:
            raise

    yield
    xprocess.getinfo("memcached").terminate()
9 | """ 10 | 11 | import pickle 12 | import time 13 | 14 | import pytest 15 | 16 | from flask_caching import backends 17 | from flask_caching.backends import RedisSentinelCache 18 | 19 | try: 20 | import redis 21 | except ImportError: 22 | redis = None 23 | 24 | try: 25 | import pylibmc as memcache 26 | except ImportError: 27 | try: 28 | from google.appengine.api import memcache 29 | except ImportError: 30 | try: 31 | import memcache 32 | except ImportError: 33 | memcache = None 34 | 35 | 36 | class CacheTestsBase: 37 | _can_use_fast_sleep = True 38 | _guaranteed_deletes = True 39 | 40 | @pytest.fixture 41 | def make_cache(self): 42 | """Return a cache class or factory.""" 43 | raise NotImplementedError() 44 | 45 | @pytest.fixture 46 | def c(self, make_cache): 47 | """Return a cache instance.""" 48 | return make_cache() 49 | 50 | 51 | class GenericCacheTests(CacheTestsBase): 52 | def test_generic_get_dict(self, c): 53 | assert c.set("a", "a") 54 | assert c.set("b", "b") 55 | d = c.get_dict("a", "b") 56 | assert "a" in d 57 | assert "a" == d["a"] 58 | assert "b" in d 59 | assert "b" == d["b"] 60 | 61 | def test_generic_set_get(self, c): 62 | for i in range(3): 63 | assert c.set(str(i), i * i) 64 | 65 | for i in range(3): 66 | result = c.get(str(i)) 67 | assert result == i * i, result 68 | 69 | def test_generic_get_set(self, c): 70 | assert c.set("foo", ["bar"]) 71 | assert c.get("foo") == ["bar"] 72 | 73 | def test_generic_get_many(self, c): 74 | assert c.set("foo", ["bar"]) 75 | assert c.set("spam", "eggs") 76 | assert c.get_many("foo", "spam") == [["bar"], "eggs"] 77 | 78 | def test_generic_set_many(self, c): 79 | assert c.set_many({"foo": "bar", "spam": ["eggs"]}) 80 | assert c.get("foo") == "bar" 81 | assert c.get("spam") == ["eggs"] 82 | 83 | def test_generic_add(self, c): 84 | # sanity check that add() works like set() 85 | assert c.add("foo", "bar") 86 | assert c.get("foo") == "bar" 87 | assert not c.add("foo", "qux") 88 | assert c.get("foo") == "bar" 89 | 90 
| def test_generic_delete(self, c): 91 | assert c.add("foo", "bar") 92 | assert c.get("foo") == "bar" 93 | assert c.delete("foo") 94 | assert c.get("foo") is None 95 | 96 | def test_generic_delete_many(self, c): 97 | assert c.add("foo", "bar") 98 | assert c.add("spam", "eggs") 99 | assert c.delete_many("foo", "spam") 100 | assert c.get("foo") is None 101 | assert c.get("spam") is None 102 | 103 | def test_generic_inc_dec(self, c): 104 | assert c.set("foo", 1) 105 | assert c.inc("foo") == c.get("foo") == 2 106 | assert c.dec("foo") == c.get("foo") == 1 107 | assert c.delete("foo") 108 | 109 | def test_generic_true_false(self, c): 110 | assert c.set("foo", True) 111 | assert c.get("foo") in (True, 1) 112 | assert c.set("bar", False) 113 | assert c.get("bar") in (False, 0) 114 | 115 | def test_generic_timeout(self, c): 116 | c.set("foo", "bar", 0) 117 | assert c.get("foo") == "bar" 118 | c.set("baz", "qux", 1) 119 | assert c.get("baz") == "qux" 120 | time.sleep(3) 121 | # timeout of zero means no timeout 122 | assert c.get("foo") == "bar" 123 | if self._guaranteed_deletes: 124 | assert c.get("baz") is None 125 | 126 | def test_generic_has(self, c): 127 | assert c.has("foo") in (False, 0) 128 | assert c.has("spam") in (False, 0) 129 | assert c.set("foo", "bar") 130 | assert c.has("foo") in (True, 1) 131 | assert c.has("spam") in (False, 0) 132 | c.delete("foo") 133 | assert c.has("foo") in (False, 0) 134 | assert c.has("spam") in (False, 0) 135 | 136 | def test_generic_get_bytes(self, c): 137 | assert c.set("foo", b"bar") 138 | assert c.get("foo") == b"bar" 139 | 140 | 141 | class TestSimpleCache(GenericCacheTests): 142 | @pytest.fixture 143 | def make_cache(self): 144 | return backends.SimpleCache 145 | 146 | def test_purge(self): 147 | c = backends.SimpleCache(threshold=2) 148 | c.set("a", "a") 149 | c.set("b", "b") 150 | c.set("c", "c") 151 | c.set("d", "d") 152 | # Cache purges old items *before* it sets new ones. 
class TestRedisCache(GenericCacheTests):
    """Run the generic cache tests against a real Redis server."""

    _can_use_fast_sleep = False

    def gen_key_prefix(self):
        return "werkzeug-test-case:"

    @pytest.fixture(scope="class", autouse=True)
    def requirements(self, redis_server):
        # Ensures a redis-server process is available (or skips the class).
        pass

    @pytest.fixture(params=(None, False, True, gen_key_prefix))
    def make_cache(self, request):
        """Build a RedisCache for each supported host/key_prefix flavour."""
        key_prefix = "werkzeug-test-case:"
        if request.param is None:
            # Plain hostname string.
            host = "localhost"
        elif callable(request.param):
            # BUG fix: this branch was unreachable because the truthiness
            # check ran first (function objects are truthy), so the callable
            # key_prefix flavour was never actually exercised — and its old
            # body referenced an undefined global.  Bind the method so the
            # prefix can be called without arguments.
            key_prefix = self.gen_key_prefix
            host = redis.Redis()
        elif request.param:
            # Pre-built client object.
            host = redis.StrictRedis()
        else:
            host = redis.Redis()

        c = backends.RedisCache(host=host, key_prefix=key_prefix)
        yield lambda: c
        c.clear()

    def test_compat(self, c):
        # Values written directly through the raw client must be readable
        # via get().  key_prefix may be a callable, so resolve it first.
        prefix = c.key_prefix() if callable(c.key_prefix) else c.key_prefix
        assert c._write_client.set(prefix + "foo", "Awesome")
        assert c.get("foo") == b"Awesome"
        assert c._write_client.set(prefix + "foo", "42")
        assert c.get("foo") == 42

    def test_empty_host(self):
        with pytest.raises(ValueError) as exc_info:
            backends.RedisCache(host=None)
        assert str(exc_info.value) == "RedisCache host parameter may not be None"
"spam", "eggs"] 214 | 215 | class DummyWriteClient: 216 | def setex(self, *args, **kwargs): 217 | return "spam" 218 | 219 | class DummyReadClient: 220 | def mget(self, *args, **kwargs): 221 | values = [ 222 | b"!" + pickle.dumps(v, pickle.HIGHEST_PROTOCOL) 223 | for v in EXPECTED_GET_MANY_VALUES 224 | ] 225 | return values 226 | 227 | c._write_client = DummyWriteClient() 228 | c._read_client = DummyReadClient() 229 | actual_values = c.get_many("foo") 230 | assert c.set("bacon", "eggs") == "spam" 231 | for actual, expected in zip(actual_values, EXPECTED_GET_MANY_VALUES): 232 | assert actual == expected 233 | 234 | 235 | class TestMemcachedCache(GenericCacheTests): 236 | _can_use_fast_sleep = False 237 | _guaranteed_deletes = False 238 | 239 | @pytest.fixture(scope="class", autouse=True) 240 | def requirements(self, memcache_server): 241 | pass 242 | 243 | @pytest.fixture 244 | def make_cache(self): 245 | c = backends.MemcachedCache(key_prefix="werkzeug-test-case:") 246 | yield lambda: c 247 | c.clear() 248 | 249 | def test_compat(self, c): 250 | assert c._client.set(c.key_prefix + "foo", "bar") 251 | assert c.get("foo") == "bar" 252 | 253 | def test_huge_timeouts(self, c): 254 | # Timeouts greater than epoch are interpreted as POSIX timestamps 255 | # (i.e. 
not relative to now, but relative to epoch) 256 | epoch = 2592000 257 | c.set("foo", "bar", epoch + 100) 258 | assert c.get("foo") == "bar" 259 | 260 | def test_timeouts(self, c): 261 | c.set("foo", "bar", 1) 262 | assert c.get("foo") == "bar" 263 | time.sleep(2) 264 | assert c.has("foo") is False 265 | 266 | 267 | class TestNullCache(CacheTestsBase): 268 | @pytest.fixture(scope="class", autouse=True) 269 | def make_cache(self): 270 | return backends.NullCache 271 | 272 | def test_has(self, c): 273 | assert not c.has("foo") 274 | -------------------------------------------------------------------------------- /tests/test_basic_app.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from flask import Flask 3 | 4 | from flask_caching import Cache 5 | from flask_caching.backends.simplecache import SimpleCache 6 | 7 | try: 8 | import redis # noqa 9 | 10 | HAS_NOT_REDIS = False 11 | except ImportError: 12 | HAS_NOT_REDIS = True 13 | 14 | 15 | class CustomCache(Cache): 16 | pass 17 | 18 | 19 | class CustomSimpleCache(SimpleCache): 20 | pass 21 | 22 | 23 | def newsimple(app, config, args, kwargs): 24 | return CustomSimpleCache(*args, **kwargs) 25 | 26 | 27 | def test_dict_config(app): 28 | cache = Cache(config={"CACHE_TYPE": "simple"}) 29 | cache.init_app(app) 30 | 31 | assert cache.config["CACHE_TYPE"] == "simple" 32 | 33 | 34 | def test_dict_config_initapp(app): 35 | cache = Cache() 36 | cache.init_app(app, config={"CACHE_TYPE": "simple"}) 37 | from flask_caching.backends.simplecache import SimpleCache 38 | 39 | assert isinstance(app.extensions["cache"][cache], SimpleCache) 40 | 41 | 42 | def test_dict_config_both(app): 43 | cache = Cache(config={"CACHE_TYPE": "null"}) 44 | cache.init_app(app, config={"CACHE_TYPE": "simple"}) 45 | from flask_caching.backends.simplecache import SimpleCache 46 | 47 | assert isinstance(app.extensions["cache"][cache], SimpleCache) 48 | 49 | 50 | def 
@pytest.mark.skipif(HAS_NOT_REDIS, reason="requires Redis")
def test_app_redis_cache_backend_url_default_db(app, redis_server):
    """A CACHE_REDIS_URL without an explicit path selects database 0."""
    config = {
        "CACHE_TYPE": "redis",
        "CACHE_REDIS_URL": "redis://localhost:6379",
    }
    cache = Cache()
    cache.init_app(app, config=config)
    from flask_caching.backends.rediscache import RedisCache

    assert isinstance(app.extensions["cache"][cache], RedisCache)
    pool = app.extensions["cache"][cache]._write_client.connection_pool
    rconn = pool.get_connection("foo")
    try:
        # No "/db" path in the URL -> redis-py defaults to database 0.
        assert rconn.db == 0
    finally:
        # BUG fix: the checked-out connection was never returned, leaking a
        # pool slot (and server connection) for the rest of the test run.
        pool.release(rconn)
"CACHE_TYPE": "redis", 111 | "CACHE_REDIS_URL": "redis://localhost:6379", 112 | "CACHE_REDIS_DB": 1, 113 | } 114 | cache = Cache() 115 | cache.init_app(app, config=config) 116 | rconn = app.extensions["cache"][cache]._write_client.connection_pool.get_connection( 117 | "foo" 118 | ) 119 | assert rconn.db == 1 120 | 121 | 122 | def test_app_custom_cache_backend(app): 123 | cache = Cache() 124 | app.config["CACHE_TYPE"] = "test_basic_app.newsimple" 125 | cache.init_app(app) 126 | 127 | with app.app_context(): 128 | assert isinstance(cache.cache, CustomSimpleCache) 129 | 130 | 131 | def test_subclassed_cache_class(app): 132 | # just invoking it here proofs that everything worked when subclassing 133 | # otherwise an werkzeug.utils.ImportStringError exception will be raised 134 | # because flask-caching can't find the backend 135 | 136 | # testing for "not raises" looked more hacky like this.. 137 | CustomCache(app) 138 | -------------------------------------------------------------------------------- /tests/test_cache.py: -------------------------------------------------------------------------------- 1 | import random 2 | import time 3 | 4 | import pytest 5 | 6 | from flask_caching import Cache 7 | 8 | try: 9 | import redis # noqa 10 | 11 | HAS_NOT_REDIS = False 12 | except ImportError: 13 | HAS_NOT_REDIS = True 14 | 15 | 16 | def test_cache_set(app, cache): 17 | cache.set("hi", "hello") 18 | 19 | assert cache.get("hi") == "hello" 20 | 21 | 22 | def test_cache_has(app, cache): 23 | cache.add("hi", "hello") 24 | assert cache.has("hi") 25 | 26 | 27 | def test_cache_add(app, cache): 28 | cache.add("hi", "hello") 29 | assert cache.get("hi") == "hello" 30 | 31 | cache.add("hi", "foobar") 32 | assert cache.get("hi") == "hello" 33 | 34 | 35 | def test_cache_delete(app, cache): 36 | cache.set("hi", "hello") 37 | cache.delete("hi") 38 | assert cache.get("hi") is None 39 | 40 | 41 | def test_cache_delete_many(app, cache): 42 | cache.set("hi", "hello") 43 | 
def test_cache_unlink_if_not(app):
    """unlink() must still remove keys on a backend without native UNLINK."""
    cache = Cache(config={"CACHE_TYPE": "simple"})
    cache.init_app(app)

    payload = "test" * 100

    # Single-key unlink.
    cache.set("biggerkey", payload)
    cache.unlink("biggerkey")
    assert cache.get("biggerkey") is None

    # Multi-key unlink.
    for key in ("biggerkey1", "biggerkey2"):
        cache.set(key, payload)
    cache.unlink("biggerkey1", "biggerkey2")
    for key in ("biggerkey1", "biggerkey2"):
        assert cache.get(key) is None
def test_cache_cached_function_with_source_check_disabled(app, cache):
    """With source_check=False the cache key ignores the function's source."""
    with app.test_request_context():

        @cache.cached(key_prefix="MyBits", source_check=False)
        def get_random_bits():
            return [random.randrange(0, 2) for _ in range(50)]

        baseline = get_random_bits()

        # Same key, so the second call is served from the cache.
        assert get_random_bits() == baseline

        # Redefine the function body.  Without source checking the stale
        # cached list is still returned instead of the new dict.
        @cache.cached(key_prefix="MyBits", source_check=False)
        def get_random_bits():
            return {"val": [random.randrange(0, 2) for _ in range(50)]}

        assert get_random_bits() == baseline
def test_cache_forced_update(app, cache):
    """A truthy forced_update callable bypasses the cached value."""
    from collections import Counter

    with app.test_request_context():
        force_refresh = False
        calls = Counter()

        @cache.cached(1, forced_update=lambda: force_refresh)
        def cached_function(param):
            calls[param] += 1

            return 1

        # First call populates the cache.
        cached_function(1)
        assert calls[1] == 1

        # Cached: the body must not run again.
        assert cached_function(1) == 1
        assert calls[1] == 1

        # Flip the flag: the next call must recompute.
        force_refresh = True

        assert cached_function(1) == 1
        assert calls[1] == 2
def test_generator(app, cache):
    """A decorated function that produces a generator must still be cacheable.

    Covers both a returned generator expression and a ``yield``-based
    generator function; a second call one second later must serve the same
    value rather than fresh timestamps.
    """
    with app.test_request_context():

        @cache.cached()
        def gen():
            return (str(time.time()) for _ in range(2))

        first = gen()
        time.sleep(1)
        assert gen() == first

        @cache.cached()
        def gen_yield():
            yield str(time.time())
            yield str(time.time())

        first = gen_yield()
        time.sleep(1)
        assert gen_yield() == first
@pytest.mark.parametrize(
    "cache_type",
    (
        FileSystemCache,
        MemcachedCache,
        NullCache,
        RedisCache,
        RedisSentinelCache,
        SASLMemcachedCache,
        SimpleCache,
        SpreadSASLMemcachedCache,
    ),
)
def test_init_nullcache(cache_type, app, tmp_path):
    """Every bundled backend can be resolved from its dotted import path."""
    # Backends that need more than the default configuration to initialize.
    backend_specific = {
        FileSystemCache: {"CACHE_DIR": tmp_path},
        SASLMemcachedCache: {
            "CACHE_MEMCACHED_USERNAME": "test",
            "CACHE_MEMCACHED_PASSWORD": "test",
        },
    }

    app.config["CACHE_TYPE"] = "flask_caching.backends." + cache_type.__name__
    app.config.update(backend_specific.get(cache_type, {}))

    instance = Cache(app=app)
    assert isinstance(app.extensions["cache"][instance], cache_type)
def test_memoize_utf8_arguments(app, cache):
    """Memoized functions must accept non-ASCII text arguments.

    BUG fix: the test previously only invoked the function and asserted
    nothing, so a wrong (or missing) cached value would have gone unnoticed.
    """
    with app.test_request_context():

        @cache.memoize()
        def big_foo(a, b):
            return f"{a}-{b}"

        # Non-ASCII arguments must neither raise during key generation nor
        # corrupt the computed value.
        assert big_foo("æøå", "chars") == "æøå-chars"
        # A repeated call must serve the identical value from the cache.
        assert big_foo("æøå", "chars") == "æøå-chars"
def test_memoize_no_timeout_delete(app, cache):
    """delete_memoized with arguments evicts only that argument tuple."""
    with app.test_request_context():

        @cache.memoize()
        def big_foo(a, b):
            return a + b + random.randrange(0, 100000)

        cached_one = big_foo(5, 1)
        cached_two = big_foo(5, 2)

        assert big_foo(5, 1) == cached_one
        assert big_foo(5, 2) == cached_two
        cache.delete_memoized(big_foo, 5, 2)

        # (5, 1) survives the targeted delete; (5, 2) is recomputed.
        assert big_foo(5, 1) == cached_one
        assert big_foo(5, 2) != cached_two

        # Drop both entries so a later run sharing a persistent backend
        # (e.g. memcached) does not see stale values.
        cache.delete_memoized(big_foo, 5, 2)
        cache.delete_memoized(big_foo, 5, 1)
def test_memoize_args(app, cache):
    """delete_memoized with positional args evicts only that exact call."""
    with app.test_request_context():

        @cache.memoize()
        def big_foo(a, b):
            return sum(a) + sum(b) + random.randrange(0, 100000)

        result_a = big_foo([5, 3, 2], [1])
        result_b = big_foo([3, 3], [3, 1])

        assert big_foo([5, 3, 2], [1]) == result_a
        assert big_foo([3, 3], [3, 1]) == result_b

        cache.delete_memoized(big_foo, [5, 3, 2], [1])

        # Only the deleted argument combination is recomputed.
        assert big_foo([5, 3, 2], [1]) != result_a
        assert big_foo([3, 3], [3, 1]) == result_b

        # Clean up both cached entries so a following run sharing a
        # persistent backend (e.g. memcached) does not see stale values.
        cache.delete_memoized(big_foo, [5, 3, 2], [1])
        # BUG fix: this previously passed [3, 3], [1] — arguments that were
        # never cached — so the second entry was never actually removed.
        cache.delete_memoized(big_foo, [3, 3], [3, 1])
result_a 262 | assert big_foo(5, dict(three=3, four=4)) == result_b 263 | 264 | 265 | def test_memoize_kwargonly(app, cache): 266 | with app.test_request_context(): 267 | 268 | @cache.memoize() 269 | def big_foo(a=None): 270 | if a is None: 271 | a = 0 272 | return a + random.random() 273 | 274 | result_a = big_foo() 275 | result_b = big_foo(5) 276 | 277 | assert big_foo() == result_a 278 | assert big_foo() < 1 279 | assert big_foo(5) == result_b 280 | assert big_foo(5) >= 5 and big_foo(5) < 6 281 | 282 | 283 | def test_memoize_arg_kwarg(app, cache): 284 | with app.test_request_context(): 285 | 286 | @cache.memoize() 287 | def f(a, b, c=1): 288 | return a + b + c + random.randrange(0, 100000) 289 | 290 | assert f(1, 2) == f(1, 2, c=1) 291 | assert f(1, 2) == f(1, 2, 1) 292 | assert f(1, 2) == f(1, 2) 293 | assert f(1, 2, 3) != f(1, 2) 294 | 295 | with pytest.raises(TypeError): 296 | f(1) 297 | 298 | 299 | def test_memoize_arg_kwarg_var_keyword(app, cache): 300 | with app.test_request_context(): 301 | 302 | @cache.memoize() 303 | def f(a, b, c=1, **kwargs): 304 | return a + b + c + random.randrange(0, 100000) + sum(list(kwargs.values())) 305 | 306 | assert f(1, 2) == f(1, 2, c=1) 307 | assert f(1, 2) == f(1, 2, 1) 308 | assert f(1, 2) == f(1, 2) 309 | assert f(1, 2, d=5, e=8) == f(1, 2, e=8, d=5) 310 | assert f(1, b=2, c=3, d=5, e=8) == f(1, 2, e=8, d=5, b=2, c=3) 311 | assert f(1, 2, 3) != f(1, 2) 312 | assert f(1, 2, 3) != f(1, 2) 313 | 314 | with pytest.raises(TypeError): 315 | f(1) 316 | 317 | 318 | def test_memoize_classarg(app, cache): 319 | @cache.memoize() 320 | def bar(a): 321 | return a.value + random.random() 322 | 323 | class Adder: # noqa: B903 324 | def __init__(self, value): 325 | self.value = value 326 | 327 | adder = Adder(15) 328 | adder2 = Adder(20) 329 | 330 | y = bar(adder) 331 | z = bar(adder2) 332 | 333 | assert y != z 334 | assert bar(adder) == y 335 | assert bar(adder) != z 336 | adder.value = 14 337 | assert bar(adder) == y 338 | assert 
bar(adder) != z 339 | 340 | assert bar(adder) != bar(adder2) 341 | assert bar(adder2) == z 342 | 343 | 344 | def test_memoize_classfunc(app, cache): 345 | class Adder: 346 | def __init__(self, initial): 347 | self.initial = initial 348 | 349 | @cache.memoize() 350 | def add(self, b): 351 | return self.initial + b 352 | 353 | adder1 = Adder(1) 354 | adder2 = Adder(2) 355 | 356 | x = adder1.add(3) 357 | assert adder1.add(3) == x 358 | assert adder1.add(4) != x 359 | assert adder1.add(3) != adder2.add(3) 360 | 361 | 362 | def test_memoize_classfunc_repr(app, cache): 363 | class Adder: 364 | def __init__(self, initial): 365 | self.initial = initial 366 | 367 | @cache.memoize() 368 | def add(self, b): 369 | return self.initial + b 370 | 371 | def __repr__(self): 372 | return "42" 373 | 374 | def __caching_id__(self): 375 | return self.initial 376 | 377 | adder1 = Adder(1) 378 | adder2 = Adder(2) 379 | 380 | x = adder1.add(3) 381 | assert adder1.add(3) == x 382 | assert adder1.add(4) != x 383 | assert adder1.add(3) != adder2.add(3) 384 | 385 | 386 | def test_memoize_classfunc_delete(app, cache): 387 | with app.test_request_context(): 388 | 389 | class Adder: 390 | def __init__(self, initial): 391 | self.initial = initial 392 | 393 | @cache.memoize() 394 | def add(self, b): 395 | return self.initial + b + random.random() 396 | 397 | adder1 = Adder(1) 398 | adder2 = Adder(2) 399 | 400 | a1 = adder1.add(3) 401 | a2 = adder2.add(3) 402 | 403 | assert a1 != a2 404 | assert adder1.add(3) == a1 405 | assert adder2.add(3) == a2 406 | 407 | cache.delete_memoized(adder1.add) 408 | 409 | a3 = adder1.add(3) 410 | a4 = adder2.add(3) 411 | 412 | assert not a1 == a3 413 | # self.assertNotEqual(a1, a3) 414 | 415 | assert a1 != a3 416 | 417 | assert a2 == a4 418 | # self.assertEqual(a2, a4) 419 | 420 | cache.delete_memoized(Adder.add) 421 | 422 | a5 = adder1.add(3) 423 | a6 = adder2.add(3) 424 | 425 | assert not a5 == a6 426 | # self.assertNotEqual(a5, a6) 427 | assert not a3 == a5 428 | 
# self.assertNotEqual(a3, a5) 429 | assert not a4 == a6 430 | # self.assertNotEqual(a4, a6) 431 | 432 | 433 | def test_memoize_classmethod_delete(app, cache): 434 | with app.test_request_context(): 435 | 436 | class Mock: 437 | @classmethod 438 | @cache.memoize(5) 439 | def big_foo(cls, a, b): 440 | return a + b + random.randrange(0, 100000) 441 | 442 | result = Mock.big_foo(5, 2) 443 | result2 = Mock.big_foo(5, 3) 444 | 445 | time.sleep(1) 446 | 447 | assert Mock.big_foo(5, 2) == result 448 | assert Mock.big_foo(5, 2) == result 449 | assert Mock.big_foo(5, 3) != result 450 | assert Mock.big_foo(5, 3) == result2 451 | 452 | cache.delete_memoized(Mock.big_foo) 453 | 454 | assert Mock.big_foo(5, 2) != result 455 | assert Mock.big_foo(5, 3) != result2 456 | 457 | 458 | def test_memoize_classmethod_delete_with_args(app, cache): 459 | with app.test_request_context(): 460 | 461 | class Mock: 462 | @classmethod 463 | @cache.memoize(5) 464 | def big_foo(cls, a, b): 465 | return a + b + random.randrange(0, 100000) 466 | 467 | result = Mock.big_foo(5, 2) 468 | result2 = Mock.big_foo(5, 3) 469 | 470 | time.sleep(1) 471 | 472 | assert Mock.big_foo(5, 2) == result 473 | assert Mock.big_foo(5, 2) == result 474 | assert Mock.big_foo(5, 3) != result 475 | assert Mock.big_foo(5, 3) == result2 476 | 477 | with pytest.raises(ValueError): 478 | cache.delete_memoized(Mock.big_foo, 5, 2) 479 | 480 | assert Mock.big_foo(5, 2) == result 481 | assert Mock.big_foo(5, 3) == result2 482 | 483 | cache.delete_memoized(Mock.big_foo, Mock, 5, 2) 484 | 485 | assert Mock.big_foo(5, 2) != result 486 | assert Mock.big_foo(5, 3) == result2 487 | 488 | 489 | def test_memoize_forced_update(app, cache): 490 | with app.test_request_context(): 491 | forced_update = False 492 | 493 | @cache.memoize(5, forced_update=lambda: forced_update) 494 | def big_foo(a, b): 495 | return a + b + random.randrange(0, 100000) 496 | 497 | result = big_foo(5, 2) 498 | time.sleep(1) 499 | assert big_foo(5, 2) == result 500 | 
501 | forced_update = True 502 | new_result = big_foo(5, 2) 503 | assert new_result != result 504 | 505 | forced_update = False 506 | time.sleep(1) 507 | assert big_foo(5, 2) == new_result 508 | 509 | 510 | def test_memoize_forced_update_parameters(app, cache): 511 | from collections import Counter 512 | 513 | with app.test_request_context(): 514 | call_counter = Counter() 515 | call_params = {} 516 | forced_update = False 517 | 518 | def forced_update_func(a, b): 519 | call_counter[1] += 1 520 | call_params[call_counter[1] - 1] = (a, b) 521 | 522 | return forced_update 523 | 524 | @cache.memoize(5, forced_update=forced_update_func) 525 | def memoized_func(a, b): 526 | return a + b + random.randrange(0, 100000) 527 | 528 | # Save the value for later inspection 529 | result = memoized_func(5, 2) 530 | # forced_update_func should have been called twice; once by memoize 531 | # itself, once by _memoize_version… 532 | assert call_counter[1] == 2 533 | # …with the values we called the function with 534 | assert call_params[0] == (5, 2) 535 | assert call_params[1] == (5, 2) 536 | time.sleep(1) 537 | 538 | # Calling the function again should return the cached value 539 | assert memoized_func(5, 2) == result 540 | # forced_update_func should have been called two more times… 541 | assert call_counter[1] == 4 542 | # …with the values we called the function with 543 | assert call_params[2] == (5, 2) 544 | assert call_params[3] == (5, 2) 545 | 546 | # Tell forced_update_func to return True next time 547 | forced_update = True 548 | # Save the new result… 549 | new_result = memoized_func(5, 2) 550 | # …which, due to the random number in the function, should be different 551 | # from the old one 552 | assert new_result != result 553 | # forced_update_func should have been called two more times again… 554 | assert call_counter[1] == 6 555 | # …with the values we called the function with 556 | assert call_params[4] == (5, 2) 557 | assert call_params[5] == (5, 2) 558 | 559 | # Now 
stop forced updating again 560 | forced_update = False 561 | time.sleep(1) 562 | # The function should return the same value as it did last time 563 | assert memoized_func(5, 2) == new_result 564 | # forced_update_func should have been called two more times again… 565 | assert call_counter[1] == 8 566 | # …with the values we called the function with 567 | assert call_params[6] == (5, 2) 568 | assert call_params[7] == (5, 2) 569 | 570 | 571 | def test_memoize_multiple_arg_kwarg_calls(app, cache): 572 | with app.test_request_context(): 573 | 574 | @cache.memoize() 575 | def big_foo(a, b, c=None, d=None): 576 | if c is None: 577 | c = [1, 1] 578 | if d is None: 579 | d = [1, 1] 580 | return ( 581 | sum(a) + sum(b) + sum(c) + sum(d) + random.randrange(0, 100000) 582 | ) # noqa 583 | 584 | result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 585 | 586 | assert big_foo([5, 3, 2], [1], d=[3, 3], c=[3, 3]) == result_a 587 | assert big_foo(b=[1], a=[5, 3, 2], c=[3, 3], d=[3, 3]) == result_a 588 | assert big_foo([5, 3, 2], [1], [3, 3], [3, 3]) == result_a 589 | 590 | 591 | def test_memoize_multiple_arg_kwarg_delete(app, cache): 592 | with app.test_request_context(): 593 | 594 | @cache.memoize() 595 | def big_foo(a, b, c=None, d=None): 596 | if c is None: 597 | c = [1, 1] 598 | if d is None: 599 | d = [1, 1] 600 | return ( 601 | sum(a) + sum(b) + sum(c) + sum(d) + random.randrange(0, 100000) 602 | ) # noqa 603 | 604 | result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 605 | cache.delete_memoized(big_foo, [5, 3, 2], [1], [3, 3], [3, 3]) 606 | result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 607 | assert result_a != result_b 608 | 609 | cache.delete_memoized(big_foo, [5, 3, 2], b=[1], c=[3, 3], d=[3, 3]) 610 | result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 611 | assert result_a != result_b 612 | 613 | cache.delete_memoized(big_foo, [5, 3, 2], [1], c=[3, 3], d=[3, 3]) 614 | result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 615 | assert result_a != 
result_b 616 | 617 | cache.delete_memoized(big_foo, [5, 3, 2], b=[1], c=[3, 3], d=[3, 3]) 618 | result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 619 | assert result_a != result_b 620 | 621 | cache.delete_memoized(big_foo, [5, 3, 2], [1], c=[3, 3], d=[3, 3]) 622 | result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 623 | assert result_a != result_b 624 | 625 | cache.delete_memoized(big_foo, [5, 3, 2], [1], [3, 3], [3, 3]) 626 | result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) 627 | assert result_a != result_b 628 | 629 | 630 | def test_memoize_kwargs_to_args(app, cache): 631 | with app.test_request_context(): 632 | 633 | def big_foo(a, b, c=None, d=None): 634 | return sum(a) + sum(b) + random.randrange(0, 100000) 635 | 636 | expected = (1, 2, "foo", "bar") 637 | 638 | args, kwargs = cache._memoize_kwargs_to_args(big_foo, 1, 2, "foo", "bar") 639 | assert args == expected 640 | args, kwargs = cache._memoize_kwargs_to_args(big_foo, 2, "foo", "bar", a=1) 641 | assert args == expected 642 | args, kwargs = cache._memoize_kwargs_to_args( 643 | big_foo, a=1, b=2, c="foo", d="bar" 644 | ) 645 | assert args == expected 646 | args, kwargs = cache._memoize_kwargs_to_args( 647 | big_foo, d="bar", b=2, a=1, c="foo" 648 | ) 649 | assert args == expected 650 | args, kwargs = cache._memoize_kwargs_to_args(big_foo, 1, 2, d="bar", c="foo") 651 | assert args == expected 652 | 653 | 654 | def test_memoize_when_using_args_unpacking(app, cache): 655 | with app.test_request_context(): 656 | 657 | @cache.memoize() 658 | def big_foo(*args): 659 | return sum(args) + random.randrange(0, 100000) 660 | 661 | result_a = big_foo(1, 2) 662 | result_b = big_foo(1, 3) 663 | 664 | assert big_foo(1, 2) == result_a 665 | assert big_foo(1, 3) == result_b 666 | assert big_foo(1, 2) != result_b 667 | assert big_foo(1, 3) != result_a 668 | 669 | cache.delete_memoized(big_foo) 670 | 671 | assert big_foo(1, 2) != result_a 672 | assert big_foo(1, 3) != result_b 673 | 674 | 675 | def 
test_memoize_when_using_variable_mix_args_unpacking(app, cache): 676 | with app.test_request_context(): 677 | 678 | @cache.memoize() 679 | def big_foo(a, b, *args, **kwargs): 680 | return ( 681 | sum([a, b]) 682 | + sum(args) 683 | + sum(kwargs.values()) 684 | + random.randrange(0, 100000) 685 | ) 686 | 687 | result_a = big_foo(1, 2, 3, 4, x=2, y=5) 688 | result_b = big_foo(4, 7, 7, 2, x=1, y=4) 689 | 690 | assert big_foo(1, 2, 3, 4, x=2, y=5) == result_a 691 | assert big_foo(4, 7, 7, 2, x=1, y=4) == result_b 692 | assert big_foo(1, 2, 3, 4, x=2, y=5) != result_b 693 | assert big_foo(4, 7, 7, 2, x=1, y=4) != result_a 694 | 695 | cache.delete_memoized(big_foo) 696 | 697 | assert big_foo(1, 2, 3, 4, x=2, y=5) != result_a 698 | assert big_foo(4, 7, 7, 2, x=1, y=4) != result_b 699 | 700 | 701 | def test_memoize_none(app, cache): 702 | with app.test_request_context(): 703 | from collections import Counter 704 | 705 | call_counter = Counter() 706 | 707 | @cache.memoize(cache_none=True) 708 | def memoize_none(param): 709 | call_counter[param] += 1 710 | 711 | return None 712 | 713 | memoize_none(1) 714 | 715 | # The memoized function should have been called 716 | assert call_counter[1] == 1 717 | 718 | # Next time we call the function, the value should be coming from the 719 | # cache... 
720 | assert memoize_none(1) is None 721 | 722 | # …thus, the call counter should remain 1 723 | assert call_counter[1] == 1 724 | 725 | cache.clear() 726 | 727 | memoize_none(1) 728 | assert call_counter[1] == 2 729 | 730 | 731 | def test_memoize_never_accept_none(app, cache): 732 | """Asserting that when cache_none is False, we always 733 | assume a None value returned from .get() means the key is not found 734 | """ 735 | with app.test_request_context(): 736 | from collections import Counter 737 | 738 | call_counter = Counter() 739 | 740 | @cache.memoize() 741 | def memoize_none(param): 742 | call_counter[param] += 1 743 | 744 | return None 745 | 746 | memoize_none(1) 747 | 748 | # The memoized function should have been called 749 | assert call_counter[1] == 1 750 | 751 | # Next time we call the function, the value should be coming from the 752 | # cache… 753 | # But the value is None and so we treat it as uncached. 754 | assert memoize_none(1) is None 755 | 756 | # …thus, the call counter should increment to 2 757 | assert call_counter[1] == 2 758 | 759 | cache.clear() 760 | 761 | memoize_none(1) 762 | assert call_counter[1] == 3 763 | 764 | 765 | def test_memoize_with_source_check_enabled(app, cache): 766 | with app.test_request_context(): 767 | 768 | @cache.memoize(source_check=True) 769 | def big_foo(a, b): 770 | return str(time.time()) 771 | 772 | first_try = big_foo(5, 2) 773 | 774 | second_try = big_foo(5, 2) 775 | 776 | assert second_try == first_try 777 | 778 | @cache.memoize(source_check=True) 779 | def big_foo(a, b): 780 | return str(time.time()) 781 | 782 | third_try = big_foo(5, 2) 783 | 784 | assert third_try[0] != first_try 785 | 786 | @cache.memoize(source_check=True) 787 | def big_foo(a, b): 788 | return str(time.time()) 789 | 790 | forth_try = big_foo(5, 2) 791 | 792 | assert forth_try == first_try 793 | 794 | 795 | def test_memoize_with_source_check_disabled(app, cache): 796 | with app.test_request_context(): 797 | 798 | 
@cache.memoize(source_check=False) 799 | def big_foo(a, b): 800 | return str(time.time()) 801 | 802 | first_try = big_foo(5, 2) 803 | 804 | second_try = big_foo(5, 2) 805 | 806 | assert second_try == first_try 807 | 808 | @cache.memoize(source_check=False) 809 | def big_foo(a, b): 810 | return time.time() 811 | 812 | third_try = big_foo(5, 2) 813 | 814 | assert third_try == first_try 815 | 816 | 817 | def test_memoize_ignore_args(app, cache): 818 | with app.test_request_context(): 819 | 820 | @cache.memoize(50, args_to_ignore=["b"]) 821 | def big_foo(a, b): 822 | return a + b + random.randrange(0, 100000) 823 | 824 | result = big_foo(5, 2) 825 | assert big_foo(5, 3) == result 826 | 827 | 828 | def test_memoize_method_ignore_self_arg(app, cache): 829 | with app.test_request_context(): 830 | 831 | class Foo: 832 | @cache.memoize(50, args_to_ignore=["self"]) 833 | def big_foo(self, a, b): 834 | return a + b + random.randrange(0, 100000) 835 | 836 | assert Foo().big_foo(5, 2) == Foo().big_foo(5, 2) 837 | 838 | 839 | def test_memoize_function_ignore_kwarg(app, cache): 840 | with app.test_request_context(): 841 | @cache.memoize(50, args_to_ignore=["b"]) 842 | def big_foo(a, b): 843 | return a + b + random.randrange(0, 100000) 844 | 845 | assert big_foo(5, 2) == big_foo(5, b=3) 846 | -------------------------------------------------------------------------------- /tests/test_template.html: -------------------------------------------------------------------------------- 1 | {% cache 60, "fragment1" %}{{somevar}}{% endcache %} 2 | {% cache 60, "fragment1", "key1" %}{{somevar}}{% endcache %} 3 | {% cache 60, "fragment1", "key1", somevar %}{{somevar}}{% endcache %} 4 | {% cache timeout, "fragment2" %}{{somevar}}{% endcache %} 5 | -------------------------------------------------------------------------------- /tests/test_templates.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | 4 | from flask import 
render_template 5 | from flask import render_template_string 6 | 7 | from flask_caching import make_template_fragment_key 8 | 9 | 10 | def test_jinjaext_cache(app, cache): 11 | somevar = "".join([random.choice(string.ascii_letters) for x in range(6)]) 12 | 13 | testkeys = [ 14 | make_template_fragment_key("fragment1"), 15 | make_template_fragment_key("fragment1", vary_on=["key1"]), 16 | make_template_fragment_key("fragment1", vary_on=["key1", somevar]), 17 | ] 18 | delkey = make_template_fragment_key("fragment2") 19 | 20 | with app.test_request_context(): 21 | #: Test if elements are cached 22 | render_template("test_template.html", somevar=somevar, timeout=60) 23 | for k in testkeys: 24 | assert cache.get(k) == somevar 25 | assert cache.get(delkey) == somevar 26 | 27 | #: Test timeout=del to delete key 28 | render_template("test_template.html", somevar=somevar, timeout="del") 29 | for k in testkeys: 30 | assert cache.get(k) == somevar 31 | assert cache.get(delkey) is None 32 | 33 | #: Test rendering templates from strings 34 | output = render_template_string( 35 | """{% cache 60, "fragment3" %}{{somevar}}{% endcache %}""", 36 | somevar=somevar, 37 | ) 38 | assert cache.get(make_template_fragment_key("fragment3")) == somevar 39 | assert output == somevar 40 | 41 | #: Test backwards compatibility 42 | output = render_template_string( 43 | """{% cache 30 %}{{somevar}}{% endcache %}""", somevar=somevar 44 | ) 45 | assert cache.get(make_template_fragment_key("None1")) == somevar 46 | assert output == somevar 47 | 48 | output = render_template_string( 49 | """{% cache 30, "fragment4", "fragment5"%}{{somevar}}{% endcache %}""", 50 | somevar=somevar, 51 | ) 52 | k = make_template_fragment_key("fragment4", vary_on=["fragment5"]) 53 | assert cache.get(k) == somevar 54 | assert output == somevar 55 | -------------------------------------------------------------------------------- /tests/test_view.py: 
-------------------------------------------------------------------------------- 1 | import hashlib 2 | import time 3 | 4 | from flask import make_response 5 | from flask import request 6 | from flask.views import View 7 | 8 | from flask_caching import CachedResponse 9 | 10 | 11 | def test_cached_view(app, cache): 12 | @app.route("/") 13 | @cache.cached(2) 14 | def cached_view(): 15 | return str(time.time()) 16 | 17 | tc = app.test_client() 18 | 19 | rv = tc.get("/") 20 | the_time = rv.data.decode("utf-8") 21 | 22 | time.sleep(1) 23 | 24 | rv = tc.get("/") 25 | 26 | assert the_time == rv.data.decode("utf-8") 27 | 28 | time.sleep(1) 29 | 30 | rv = tc.get("/") 31 | assert the_time != rv.data.decode("utf-8") 32 | 33 | 34 | def test_cached_view_class(app, cache): 35 | class CachedView(View): 36 | @cache.cached(2) 37 | def dispatch_request(self): 38 | return str(time.time()) 39 | 40 | app.add_url_rule("/", view_func=CachedView.as_view("name")) 41 | 42 | tc = app.test_client() 43 | 44 | rv = tc.get("/") 45 | the_time = rv.data.decode("utf-8") 46 | 47 | time.sleep(1) 48 | 49 | rv = tc.get("/") 50 | 51 | assert the_time == rv.data.decode("utf-8") 52 | 53 | time.sleep(1) 54 | 55 | rv = tc.get("/") 56 | assert the_time != rv.data.decode("utf-8") 57 | 58 | 59 | def test_async_cached_view(app, cache): 60 | import asyncio 61 | import sys 62 | 63 | if sys.version_info < (3, 7): 64 | return 65 | 66 | @app.route("/test-async") 67 | @cache.cached(2) 68 | async def cached_async_view(): 69 | await asyncio.sleep(0.1) 70 | return str(time.time()) 71 | 72 | tc = app.test_client() 73 | rv = tc.get("/test-async") 74 | the_time = rv.data.decode("utf-8") 75 | 76 | time.sleep(1) 77 | 78 | rv = tc.get("/test-async") 79 | assert the_time == rv.data.decode("utf-8") 80 | 81 | 82 | def test_cached_view_unless(app, cache): 83 | @app.route("/a") 84 | @cache.cached(5, unless=lambda: True) 85 | def non_cached_view(): 86 | return str(time.time()) 87 | 88 | @app.route("/b") 89 | @cache.cached(5, 
unless=lambda: False) 90 | def cached_view(): 91 | return str(time.time()) 92 | 93 | tc = app.test_client() 94 | 95 | rv = tc.get("/a") 96 | the_time = rv.data.decode("utf-8") 97 | 98 | time.sleep(1) 99 | 100 | rv = tc.get("/a") 101 | assert the_time != rv.data.decode("utf-8") 102 | 103 | rv = tc.get("/b") 104 | the_time = rv.data.decode("utf-8") 105 | 106 | time.sleep(1) 107 | rv = tc.get("/b") 108 | 109 | assert the_time == rv.data.decode("utf-8") 110 | 111 | 112 | def test_cached_view_response_filter(app, cache): 113 | @app.route("/a") 114 | @cache.cached(5, response_filter=lambda x: x[1] < 400) 115 | def cached_view(): 116 | return (str(time.time()), app.return_code) 117 | 118 | tc = app.test_client() 119 | 120 | # 500 response does not cache 121 | app.return_code = 500 122 | rv = tc.get("/a") 123 | the_time = rv.data.decode("utf-8") 124 | 125 | time.sleep(1) 126 | 127 | rv = tc.get("/a") 128 | assert the_time != rv.data.decode("utf-8") 129 | 130 | # 200 response caches 131 | app.return_code = 200 132 | rv = tc.get("/a") 133 | the_time = rv.data.decode("utf-8") 134 | 135 | time.sleep(1) 136 | 137 | rv = tc.get("/a") 138 | assert the_time == rv.data.decode("utf-8") 139 | 140 | 141 | def test_cached_view_forced_update(app, cache): 142 | forced_update = False 143 | 144 | @app.route("/a") 145 | @cache.cached(5, forced_update=lambda: forced_update) 146 | def view(): 147 | return str(time.time()) 148 | 149 | tc = app.test_client() 150 | 151 | rv = tc.get("/a") 152 | the_time = rv.data.decode("utf-8") 153 | time.sleep(1) 154 | rv = tc.get("/a") 155 | assert the_time == rv.data.decode("utf-8") 156 | 157 | forced_update = True 158 | rv = tc.get("/a") 159 | new_time = rv.data.decode("utf-8") 160 | assert new_time != the_time 161 | 162 | forced_update = False 163 | time.sleep(1) 164 | rv = tc.get("/a") 165 | assert new_time == rv.data.decode("utf-8") 166 | 167 | 168 | def test_generate_cache_key_from_different_view(app, cache): 169 | @app.route("/cake/") 170 | 
@cache.cached() 171 | def view_cake(flavor): 172 | # What's the cache key for apple cake? thanks for making me hungry 173 | view_cake.cake_cache_key = view_cake.make_cache_key("apple") 174 | return str(time.time()) 175 | 176 | view_cake.cake_cache_key = "" 177 | 178 | @app.route("/pie/") 179 | @cache.cached() 180 | def view_pie(flavor): 181 | # What's the cache key for apple cake? 182 | view_pie.cake_cache_key = view_cake.make_cache_key("apple") 183 | return str(time.time()) 184 | 185 | view_pie.cake_cache_key = "" 186 | 187 | tc = app.test_client() 188 | tc.get("/cake/chocolate") 189 | tc.get("/pie/chocolate") 190 | 191 | assert view_cake.cake_cache_key == view_pie.cake_cache_key 192 | 193 | 194 | # rename/move to seperate module? 195 | def test_cache_key_property(app, cache): 196 | @app.route("/") 197 | @cache.cached(5) 198 | def cached_view(): 199 | return str(time.time()) 200 | 201 | assert hasattr(cached_view, "make_cache_key") 202 | assert callable(cached_view.make_cache_key) 203 | 204 | tc = app.test_client() 205 | 206 | rv = tc.get("/") 207 | the_time = rv.data.decode("utf-8") 208 | 209 | with app.test_request_context(): 210 | cache_data = cache.get(cached_view.make_cache_key()) 211 | assert the_time == cache_data 212 | 213 | 214 | def test_set_make_cache_key_property(app, cache): 215 | @app.route("/") 216 | @cache.cached(5) 217 | def cached_view(): 218 | return str(time.time()) 219 | 220 | cached_view.make_cache_key = lambda *args, **kwargs: request.args["foo"] 221 | 222 | tc = app.test_client() 223 | 224 | rv = tc.get("/?foo=a") 225 | a = rv.data.decode("utf-8") 226 | 227 | rv = tc.get("/?foo=b") 228 | b = rv.data.decode("utf-8") 229 | assert a != b 230 | 231 | tc = app.test_client() 232 | rv = tc.get("/?foo=a") 233 | a_2 = rv.data.decode("utf-8") 234 | assert a == a_2 235 | 236 | rv = tc.get("/?foo=b") 237 | b_2 = rv.data.decode("utf-8") 238 | assert b == b_2 239 | 240 | 241 | def test_make_cache_key_function_property(app, cache): 242 | @app.route("//") 
243 | @cache.memoize(5) 244 | def cached_view(foo, bar): 245 | return str(time.time()) 246 | 247 | assert hasattr(cached_view, "make_cache_key") 248 | assert callable(cached_view.make_cache_key) 249 | 250 | tc = app.test_client() 251 | 252 | rv = tc.get("/a/b") 253 | the_time = rv.data.decode("utf-8") 254 | 255 | cache_key = cached_view.make_cache_key(cached_view.uncached, foo="a", bar="b") 256 | cache_data = cache.get(cache_key) 257 | assert the_time == cache_data 258 | 259 | different_key = cached_view.make_cache_key(cached_view.uncached, foo="b", bar="a") 260 | different_data = cache.get(different_key) 261 | assert the_time != different_data 262 | 263 | 264 | def test_cache_timeout_property(app, cache): 265 | @app.route("/") 266 | @cache.memoize(2) 267 | def cached_view1(): 268 | return str(time.time()) 269 | 270 | @app.route("//") 271 | @cache.memoize(4) 272 | def cached_view2(foo, bar): 273 | return str(time.time()) 274 | 275 | assert hasattr(cached_view1, "cache_timeout") 276 | assert hasattr(cached_view2, "cache_timeout") 277 | assert cached_view1.cache_timeout == 2 278 | assert cached_view2.cache_timeout == 4 279 | 280 | # test that this is a read-write property 281 | cached_view1.cache_timeout = 5 282 | cached_view2.cache_timeout = 7 283 | 284 | assert cached_view1.cache_timeout == 5 285 | assert cached_view2.cache_timeout == 7 286 | tc = app.test_client() 287 | 288 | rv1 = tc.get("/") 289 | time1 = rv1.data.decode("utf-8") 290 | time.sleep(1) 291 | rv2 = tc.get("/a/b") 292 | time2 = rv2.data.decode("utf-8") 293 | 294 | # VIEW1 295 | # it's been 1 second, cache is still active 296 | assert time1 == tc.get("/").data.decode("utf-8") 297 | time.sleep(5) 298 | # it's been >5 seconds, cache is not still active 299 | assert time1 != tc.get("/").data.decode("utf-8") 300 | 301 | # VIEW2 302 | # it's been >17 seconds, cache is still active 303 | # self.assertEqual(time2, tc.get('/a/b').data.decode('utf-8')) 304 | assert time2 == tc.get("/a/b").data.decode("utf-8") 
305 | time.sleep(3) 306 | # it's been >7 seconds, cache is not still active 307 | assert time2 != tc.get("/a/b").data.decode("utf-8") 308 | 309 | 310 | def test_cache_timeout_dynamic(app, cache): 311 | @app.route("/") 312 | @cache.cached(timeout=1) 313 | def cached_view(): 314 | # This should override the timeout to be 2 seconds 315 | return CachedResponse(response=make_response(str(time.time())), timeout=2) 316 | 317 | tc = app.test_client() 318 | 319 | rv1 = tc.get("/") 320 | time1 = rv1.data.decode("utf-8") 321 | time.sleep(1) 322 | 323 | # it's been 1 second, cache is still active 324 | assert time1 == tc.get("/").data.decode("utf-8") 325 | time.sleep(1) 326 | # it's been >2 seconds, cache is not still active 327 | assert time1 != tc.get("/").data.decode("utf-8") 328 | 329 | 330 | def test_generate_cache_key_from_query_string(app, cache): 331 | """Test the _make_cache_key_query_string() cache key maker. 332 | 333 | Create three requests to verify that the same query string 334 | parameters (key/value) always reference the same cache, 335 | regardless of the order of parameters. 336 | 337 | Also test to make sure that the same cache isn't being used for 338 | any/all query string parameters. 339 | 340 | For example, these two requests should yield the same 341 | cache/cache key: 342 | 343 | * GET /v1/works?mock=true&offset=20&limit=15 344 | * GET /v1/works?limit=15&mock=true&offset=20 345 | 346 | Caching functionality is verified by a `@cached` route `/works` which 347 | produces a time in its response. The time in the response can verify that 348 | two requests with the same query string parameters/values, though 349 | differently ordered, produce responses with the same time. 350 | """ 351 | 352 | @app.route("/works") 353 | @cache.cached(query_string=True) 354 | def view_works(): 355 | return str(time.time()) 356 | 357 | tc = app.test_client() 358 | 359 | # Make our first query... 
360 | first_response = tc.get("/works?mock=true&offset=20&limit=15") 361 | first_time = first_response.get_data(as_text=True) 362 | 363 | # Make the second query... 364 | second_response = tc.get("/works?limit=15&mock=true&offset=20") 365 | second_time = second_response.get_data(as_text=True) 366 | 367 | # Now make sure the time for the first and second 368 | # query are the same! 369 | assert second_time == first_time 370 | 371 | # Last/third query with different parameters/values should 372 | # produce a different time. 373 | third_response = tc.get("/v1/works?limit=20&mock=true&offset=60") 374 | third_time = third_response.get_data(as_text=True) 375 | 376 | # ... making sure that different query parameter values 377 | # don't yield the same cache! 378 | assert not third_time == second_time 379 | 380 | 381 | def test_generate_cache_key_from_query_string_repeated_paramaters(app, cache): 382 | """Test the _make_cache_key_query_string() cache key maker's support for 383 | repeated query paramaters 384 | 385 | URL params can be repeated with different values. Flask's MultiDict 386 | supports them 387 | """ 388 | 389 | @app.route("/works") 390 | @cache.cached(query_string=True) 391 | def view_works(): 392 | flatted_values = sum(request.args.listvalues(), []) 393 | return str(sorted(flatted_values)) + str(time.time()) 394 | 395 | tc = app.test_client() 396 | 397 | # Make our first query... 398 | first_response = tc.get("/works?mock=true&offset=20&limit=15&user[]=123&user[]=124") 399 | first_time = first_response.get_data(as_text=True) 400 | 401 | # Make the second query... 402 | second_response = tc.get( 403 | "/works?mock=true&offset=20&limit=15&user[]=124&user[]=123" 404 | ) 405 | second_time = second_response.get_data(as_text=True) 406 | 407 | # Now make sure the time for the first and second 408 | # query are the same! 409 | assert second_time == first_time 410 | 411 | # Last/third query with different parameters/values should 412 | # produce a different time. 
413 | third_response = tc.get("/works?mock=true&offset=20&limit=15&user[]=125&user[]=124") 414 | third_time = third_response.get_data(as_text=True) 415 | 416 | # ... making sure that different query parameter values 417 | # don't yield the same cache! 418 | assert not third_time == second_time 419 | 420 | 421 | def test_generate_cache_key_from_request_body(app, cache): 422 | """Test a user supplied cache key maker. 423 | Create three requests to verify that the same request body 424 | always reference the same cache 425 | Also test to make sure that the same cache isn't being used for 426 | any/all query string parameters. 427 | Caching functionality is verified by a `@cached` route `/works` which 428 | produces a time in its response. The time in the response can verify that 429 | two requests with the same request body produce responses with the same 430 | time. 431 | """ 432 | 433 | def _make_cache_key_request_body(argument): 434 | """Create keys based on request body.""" 435 | # now hash the request body so it can be 436 | # used as a key for cache. 437 | request_body = request.get_data(as_text=False) 438 | hashed_body = str(hashlib.md5(request_body).hexdigest()) 439 | cache_key = request.path + hashed_body 440 | return cache_key 441 | 442 | @app.route("/works/", methods=["POST"]) 443 | @cache.cached(make_cache_key=_make_cache_key_request_body) 444 | def view_works(argument): 445 | return str(time.time()) + request.get_data().decode() 446 | 447 | tc = app.test_client() 448 | 449 | # Make our request... 450 | first_response = tc.post("/works/arg", data=dict(mock=True, value=1, test=2)) 451 | first_time = first_response.get_data(as_text=True) 452 | 453 | # Make the request... 454 | second_response = tc.post("/works/arg", data=dict(mock=True, value=1, test=2)) 455 | second_time = second_response.get_data(as_text=True) 456 | 457 | # Now make sure the time for the first and second 458 | # requests are the same! 
459 | assert second_time == first_time 460 | 461 | # Last/third request with different body should 462 | # produce a different time. 463 | third_response = tc.post("/works/arg", data=dict(mock=True, value=2, test=3)) 464 | third_time = third_response.get_data(as_text=True) 465 | 466 | # ... making sure that different request bodies 467 | # don't yield the same cache! 468 | assert not third_time == second_time 469 | 470 | 471 | def test_cache_with_query_string_and_source_check_enabled(app, cache): 472 | """Test the _make_cache_key_query_string() cache key maker with 473 | source_check set to True to include the view's function's source code as 474 | part of the cache hash key. 475 | """ 476 | 477 | @cache.cached(query_string=True, source_check=True) 478 | def view_works(): 479 | return str(time.time()) 480 | 481 | app.add_url_rule("/works", "works", view_works) 482 | 483 | tc = app.test_client() 484 | 485 | # Make our first query... 486 | first_response = tc.get("/works?mock=true&offset=20&limit=15") 487 | first_time = first_response.get_data(as_text=True) 488 | 489 | # Make our second query... 490 | second_response = tc.get("/works?mock=true&offset=20&limit=15") 491 | second_time = second_response.get_data(as_text=True) 492 | 493 | # The cache should yield the same data first and second time 494 | assert first_time == second_time 495 | 496 | # Change the source of the function attached to the view 497 | @cache.cached(query_string=True, source_check=True) 498 | def view_works(): 499 | return str(time.time()) 500 | 501 | # ... and we override the function attached to the view 502 | app.view_functions["works"] = view_works 503 | 504 | tc = app.test_client() 505 | 506 | # Make the second query... 507 | third_response = tc.get("/works?mock=true&offset=20&limit=15") 508 | third_time = third_response.get_data(as_text=True) 509 | 510 | # Now make sure the time for the first and third 511 | # responses are not the same i.e. cached is not used! 
512 | assert third_time[0] != first_time 513 | 514 | # Change the source of the function to what it was originally 515 | @cache.cached(query_string=True, source_check=True) 516 | def view_works(): 517 | return str(time.time()) 518 | 519 | app.view_functions["works"] = view_works 520 | 521 | tc = app.test_client() 522 | 523 | # Last/third query with different parameters/values should 524 | # produce a different time. 525 | forth_response = tc.get("/works?mock=true&offset=20&limit=15") 526 | forth_time = forth_response.get_data(as_text=True) 527 | 528 | # ... making sure that the first value and the forth value are the same 529 | # since the source is the same 530 | assert forth_time == first_time 531 | 532 | 533 | def test_cache_with_query_string_and_source_check_disabled(app, cache): 534 | """Test the _make_cache_key_query_string() cache key maker with 535 | source_check set to False to exclude the view's function's source code as 536 | part of the cache hash key and to see if changing the source changes the 537 | data. 538 | """ 539 | 540 | @cache.cached(query_string=True, source_check=False) 541 | def view_works(): 542 | return str(time.time()) 543 | 544 | app.add_url_rule("/works", "works", view_works) 545 | 546 | tc = app.test_client() 547 | 548 | # Make our first query... 549 | first_response = tc.get("/works?mock=true&offset=20&limit=15") 550 | first_time = first_response.get_data(as_text=True) 551 | 552 | # Make our second query... 553 | second_response = tc.get("/works?mock=true&offset=20&limit=15") 554 | second_time = second_response.get_data(as_text=True) 555 | 556 | # The cache should yield the same data first and second time 557 | assert first_time == second_time 558 | 559 | # Change the source of the function attached to the view 560 | @cache.cached(query_string=True, source_check=False) 561 | def view_works(): 562 | return str(time.time()) 563 | 564 | # ... 
and we override the function attached to the view 565 | app.view_functions["works"] = view_works 566 | 567 | tc = app.test_client() 568 | 569 | # Make the second query... 570 | third_response = tc.get("/works?mock=true&offset=20&limit=15") 571 | third_time = third_response.get_data(as_text=True) 572 | 573 | # Now make sure the time for the first and third responses are the same 574 | # i.e. cached is used since cache will not check for source changes! 575 | assert third_time == first_time 576 | 577 | 578 | def test_hit_cache(app, cache): 579 | @app.route("/") 580 | @cache.cached(10, response_hit_indication=True) 581 | def cached_view(): 582 | # This should override the timeout to be 2 seconds 583 | return {"data": "data"} 584 | 585 | tc = app.test_client() 586 | 587 | assert tc.get("/").headers.get("hit_cache") is None 588 | assert tc.get("/").headers.get("hit_cache") == "True" 589 | assert tc.get("/").headers.get("hit_cache") == "True" 590 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py{39,38,py3,310,311} 4 | style 5 | typing 6 | docs 7 | skip_missing_interpreters = true 8 | 9 | [testenv] 10 | setenv = TMPDIR={envtmpdir} 11 | deps = -r requirements/tests.txt 12 | commands = 13 | pytest -v --capture=tee-sys --tb=short --basetemp={envtmpdir} {posargs} 14 | 15 | [testenv:style] 16 | deps = pre-commit 17 | skip_install = true 18 | commands = pre-commit run --all-files --show-diff-on-failure 19 | 20 | [testenv:typing] 21 | deps = -r requirements/typing.txt 22 | commands = mypy 23 | 24 | [testenv:docs] 25 | changedir = docs 26 | deps = -r requirements/docs.txt 27 | commands = sphinx-build -b html . _build/ 28 | --------------------------------------------------------------------------------