├── .gitignore ├── .travis.yml ├── CHANGES.md ├── LICENSE.md ├── MANIFEST.in ├── README.md ├── dev-requirements.txt ├── docs ├── Makefile ├── advance_tag.md ├── cache_null_and_miss.md ├── conf.py ├── index.md ├── installation.md └── use_kwargs.md ├── requirements.txt ├── setup.cfg ├── setup.py ├── tache ├── __init__.py ├── _compat.py ├── backend.py ├── batch.py ├── cached.py ├── serializer.py ├── shortid.py ├── tache.py └── utils.py ├── tests ├── __init__.py ├── test_cache_key.py ├── test_redis_batch.py ├── test_redis_cache.py └── test_redis_tag.py └── tox.ini /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | *.swp 3 | 4 | # C extensions 5 | *.so 6 | 7 | # pycharm 8 | .idea/ 9 | .idea 10 | 11 | # Packages 12 | *.egg 13 | *.egg-info 14 | build 15 | eggs 16 | parts 17 | bin 18 | var 19 | sdist 20 | dist 21 | develop-eggs 22 | .installed.cfg 23 | lib 24 | lib64 25 | 26 | # Installer logs 27 | pip-log.txt 28 | 29 | # Unit test / coverage reports 30 | .coverage 31 | .tox 32 | nosetests.xml 33 | coverage.xml 34 | 35 | # Complexity 36 | output/*.html 37 | output/*/index.html 38 | .bak 39 | 40 | # Sphinx 41 | docs/_build 42 | 43 | # Cookiecutter 44 | output/ 45 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | - "3.6" 5 | install: pip install tox-travis 6 | script: tox 7 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========== 3 | 4 | 0.2.1 (2019-11-07) 5 | ------------------ 6 | - fix UnicodeEncodeError in PY2 when occur chinese key 7 | 8 | 0.2.0 (2017-12-20) 9 | ------------------- 10 | - first release on pypi. 
11 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017-present, zhihu, Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include requirements.txt -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tache 2 | [![Build Status](https://travis-ci.org/zhihu/tache.svg?branch=master)](https://travis-ci.org/zhihu/tache) 3 | [![Pypi Status](https://img.shields.io/pypi/v/tache.svg)](https://pypi.python.org/pypi/tache) 4 | 5 | Tache 是一个 Python 的缓存框架。它基于如下的目标而设计: 6 | 7 | * 同时支持 Python2 和 Python3 8 | * 支持缓存普通函数/实例方法/类方法/静态方法 9 | * 支持 Batch 批量缓存 10 | * 支持基于 Tag 的缓存和失效 11 | * 支持基于参数显式声明 key 格式 12 | 13 | [Documentation](http://zhihu.github.io/tache) 14 | 15 | [项目地址](https://github.com/zhihu/tache) 16 | 17 | 18 | Contents 19 | --------- 20 | * [Tag 详细用法](docs/advance_tag.md) 21 | * [使用关键字参数](docs/use_kwargs.md) 22 | * [Cache 空值与缓存穿透](docs/cache_null_and_miss.md) 23 | 24 | 25 | ## Features 26 | 27 | * 默认缓存空值,防止穿透 28 | * 基于tag 批量失效缓存 29 | * batch 批量缓存 30 | * 支持 `YAML` `JSON` `PICKLE` 多种 Backend Serializer 31 | 32 | ## Getting Started 33 | 34 | * 基本用法 35 | 36 | ``` 37 | import random 38 | import fakeredis 39 | from tache import RedisCache 40 | 41 | redis_client = fakeredis.FakeStrictRedis() 42 | cache = RedisCache(conn=redis_client, format="JSON") 43 | 44 | @cache.cached() 45 | def add(a, b): 46 | return a + b + random.randint(1,100) 47 | 48 | result1 = add(5, 6) 49 | # 缓存生效值不变 50 | assert add(5, 6) == result1 51 | # 失效缓存 52 | add.invalidate(5, 6) 53 | assert add(5, 6) != result1 54 | ``` 55 | 56 | * 基于 tag 的批量缓存失效 57 | 58 | tag 可以是固定也可以是动态的,其中动态参数代表在函数中的参数位置。 59 | 失效某个 tag 时,代表这个函数下拥有相同 tag 的缓存全部失效。 60 | 61 | ``` 62 | @cache.cached(tags=["a:{0}"]) 63 | def add(a, b): 64 | return a + b + random.randint(1,100) 65 | 66 | result1 = add(5, 6) 67 | result2 = add(5, 7) 68 | 
add.invalidate_tag("a:5") 69 | assert result1 != add(5, 6) 70 | assert result2 != add(5, 7) 71 | ``` 72 | 73 | 74 | * refresh 刷新缓存 75 | 76 | 当调用refresh 时,将会重新刷新缓存并返回最新值。 77 | 78 | 79 | ``` 80 | class A(object): 81 | 82 | def __init__(self): 83 | self.extra = 0 84 | 85 | @cache.cached() 86 | def add(self, a, b): 87 | self.extra += 1 88 | return a + b + self.extra 89 | 90 | a = A() 91 | assert a.add(5, 6) == 12 92 | assert a.extra == 1 93 | assert a.add.refresh(5, 6) == 13 94 | assert a.extra == 2 95 | ``` 96 | 97 | * batch 缓存模式 98 | 99 | ``` 100 | @cache.batch() 101 | def get_comments(*comment_ids): 102 | return [get_comment(c) for c in comment_ids] 103 | 104 | get_comments(1,2,3,4,5) # no cache, 调用完毕全部缓存 105 | get_comments(2,3,4,5,6) # 2,3,4,5 从缓存中取,6 在调用完缓存 106 | get_comments.invalidate(3,4,5) # 失效 3,4,5 的缓存 107 | ``` 108 | 109 | * 显式声明 Key 110 | 111 | Tache 允许你显式声明 Key 的生成规则, 不论代码如何重构, 生成的 key 都不会改变。 112 | 113 | ``` 114 | class B: 115 | 116 | def __init__(self): 117 | self.count = 0 118 | 119 | @cache.cached("counter.B.add|{0}-{1}") 120 | def add(self, a, b): 121 | self.count += 1 122 | return a + b + self.count 123 | ``` 124 | 125 | 126 | ## Notice 127 | 128 | * 支持 `classmethod/staticmethod` 描述符, 但在使用 `classmethod` 时目前必须把 129 | `classmethod` 放在内层 130 | 131 | 132 | ``` 133 | class AC(object): 134 | 135 | @cache.cached() 136 | @classmethod 137 | def add(cls, a, b): 138 | return a + b + random.randint(1,100) 139 | ``` 140 | 141 | * 设置 namespace, 处理对象属性修改的问题 142 | 143 | key 的生成规则默认为 `namespace:module.classname.func|arg1-arg2|tag1-tag2`。 144 | 其中 `namespace` 为空, `classname` 不存在时也为空。 145 | 146 | ``` 147 | class A(object): 148 | @cache.cached(namespace="v1") 149 | def add(self, a, b): 150 | return db.execute(sql).fetchone() 151 | ``` 152 | 153 | 这个例子中,如果数据库字段发生更改,可以通过修改 namespace 的方式,让新老代码使用不同的缓存结果。 154 | -------------------------------------------------------------------------------- /dev-requirements.txt: 
-------------------------------------------------------------------------------- 1 | # Dev/Deployment 2 | sphinx 3 | recommonmark 4 | nose 5 | coverage 6 | sqlalchemy 7 | redis 8 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help 23 | help: 24 | @echo "Please use \`make <target>' where <target> is one of" 25 | @echo " html to make standalone HTML files" 26 | @echo " dirhtml to make HTML files named index.html in directories" 27 | @echo " singlehtml to make a single large HTML file" 28 | @echo " pickle to make pickle files" 29 | @echo " json to make JSON files" 30 | @echo " htmlhelp to make HTML files and a HTML help project" 31 | @echo " qthelp to make HTML files and a qthelp project" 32 | @echo " applehelp to make an Apple Help Book" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " epub3 to make an epub3" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo " texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | @echo " dummy to check syntax errors of document sources" 51 | 52 | .PHONY: clean 53 | clean: 54 | rm -rf $(BUILDDIR)/* 55 | 56 | .PHONY: html 57 | html: 58 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
61 | 62 | .PHONY: dirhtml 63 | dirhtml: 64 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 65 | @echo 66 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 67 | 68 | .PHONY: singlehtml 69 | singlehtml: 70 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 71 | @echo 72 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 73 | 74 | .PHONY: pickle 75 | pickle: 76 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 77 | @echo 78 | @echo "Build finished; now you can process the pickle files." 79 | 80 | .PHONY: json 81 | json: 82 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 83 | @echo 84 | @echo "Build finished; now you can process the JSON files." 85 | 86 | .PHONY: htmlhelp 87 | htmlhelp: 88 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 89 | @echo 90 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 91 | ".hhp project file in $(BUILDDIR)/htmlhelp." 92 | 93 | .PHONY: qthelp 94 | qthelp: 95 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 96 | @echo 97 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 98 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 99 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/twitterpandas.qhcp" 100 | @echo "To view the help file:" 101 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/twitterpandas.qhc" 102 | 103 | .PHONY: applehelp 104 | applehelp: 105 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 106 | @echo 107 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 108 | @echo "N.B. You won't be able to view it unless you put it in" \ 109 | "~/Library/Documentation/Help or install it in your application" \ 110 | "bundle." 111 | 112 | .PHONY: devhelp 113 | devhelp: 114 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 115 | @echo 116 | @echo "Build finished." 
117 | @echo "To view the help file:" 118 | @echo "# mkdir -p $$HOME/.local/share/devhelp/twitterpandas" 119 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/twitterpandas" 120 | @echo "# devhelp" 121 | 122 | .PHONY: epub 123 | epub: 124 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 125 | @echo 126 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 127 | 128 | .PHONY: epub3 129 | epub3: 130 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 131 | @echo 132 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 133 | 134 | .PHONY: latex 135 | latex: 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo 138 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 139 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 140 | "(use \`make latexpdf' here to do that automatically)." 141 | 142 | .PHONY: latexpdf 143 | latexpdf: 144 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 145 | @echo "Running LaTeX files through pdflatex..." 146 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 147 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 148 | 149 | .PHONY: latexpdfja 150 | latexpdfja: 151 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 152 | @echo "Running LaTeX files through platex and dvipdfmx..." 153 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 154 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 155 | 156 | .PHONY: text 157 | text: 158 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 159 | @echo 160 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 161 | 162 | .PHONY: man 163 | man: 164 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 165 | @echo 166 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 167 | 168 | .PHONY: texinfo 169 | texinfo: 170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 171 | @echo 172 | @echo "Build finished. 
The Texinfo files are in $(BUILDDIR)/texinfo." 173 | @echo "Run \`make' in that directory to run these through makeinfo" \ 174 | "(use \`make info' here to do that automatically)." 175 | 176 | .PHONY: info 177 | info: 178 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 179 | @echo "Running Texinfo files through makeinfo..." 180 | make -C $(BUILDDIR)/texinfo info 181 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 182 | 183 | .PHONY: gettext 184 | gettext: 185 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 186 | @echo 187 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 188 | 189 | .PHONY: changes 190 | changes: 191 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 192 | @echo 193 | @echo "The overview file is in $(BUILDDIR)/changes." 194 | 195 | .PHONY: linkcheck 196 | linkcheck: 197 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 198 | @echo 199 | @echo "Link check complete; look for any errors in the above output " \ 200 | "or in $(BUILDDIR)/linkcheck/output.txt." 201 | 202 | .PHONY: doctest 203 | doctest: 204 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 205 | @echo "Testing of doctests in the sources finished, look at the " \ 206 | "results in $(BUILDDIR)/doctest/output.txt." 207 | 208 | .PHONY: coverage 209 | coverage: 210 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 211 | @echo "Testing of coverage in the sources finished, look at the " \ 212 | "results in $(BUILDDIR)/coverage/python.txt." 213 | 214 | .PHONY: xml 215 | xml: 216 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 217 | @echo 218 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 219 | 220 | .PHONY: pseudoxml 221 | pseudoxml: 222 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 223 | @echo 224 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
225 | 226 | .PHONY: dummy 227 | dummy: 228 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 229 | @echo 230 | @echo "Build finished. Dummy builder generates no files." 231 | -------------------------------------------------------------------------------- /docs/advance_tag.md: -------------------------------------------------------------------------------- 1 | # Tag 详细用法 2 | 3 | .. note:: 不建议用来缓存细粒度接口,每次读取时会多取一次 tag, 因此对接口缓存的请求量会放大一倍 4 | 5 | ## 可以有多个tag 6 | 7 | ``` 8 | @cache.cached(tags=["a:{0}", "b:{1}", "c"]) 9 | def add(a, b): 10 | return a + b + random.randint(1,100) 11 | ``` 12 | 13 | ## tag 可以是函数 14 | 15 | 接受参数 `*args`, `**kwargs`, 返回字符串。由于默认的 16 | key_generator 不接受 kwargs, 通常用不上这个参数。 17 | 18 | ``` 19 | @cache.cached(tags=[lambda *args, **kwargs: "add:{0}".format(args[0] + args[1])]) 20 | def add(a, b): 21 | return a + b + random.randint(1,100) 22 | 23 | add(5, 6) 24 | add(4, 7) 25 | add(5, 8) 26 | add.invalidate_tag("add:11") # 前两个函数的缓存失效 27 | ``` 28 | -------------------------------------------------------------------------------- /docs/cache_null_and_miss.md: -------------------------------------------------------------------------------- 1 | # Cache 空值与缓存穿透 2 | 3 | 当查询一个不存在的数据时,会得到一个空值。如果缓存层不缓存这个空值,那么查询就会穿透到 4 | 数据库,给数据库带来查询压力。当一个爬虫大量抓取不存在的数据时,这个问题尤为明显。 5 | 缓存穿透使得缓存层失去了保护后端存储的能力,如果后端存储层关联业务比较多的情况下,甚至可能引起雪崩。 6 | 7 | 但是缓存空值也会带来新问题: 8 | 9 | - 其一, 浪费缓存存储空间。比起不缓存空值,会缓存更多的数据。但我们认为以空间换时间是值得的,对于 10 | 这类数据我们也会缓存更短的时间(在5 分钟,和正常过期时间的十分之一两者间去最小值) 11 | 12 | - 其二, 业务层新增数据可能不能被正常取到。当缓存了一条不存在的数据后,在缓存过期前这条数据可能刚好 13 | 被创建了出来,这时就造成了一定时间内的数据不一致。业务层需要注意到这点,如果在新建数据后就立刻访问, 14 | 你需要 refresh 一下来刷新缓存。 15 | 16 | 对于访问量比较小的业务,可以直接禁掉缓存空值。`Tache.cached` 方法中提供一个 `should_cache_fn` 的参数, 17 | 接受一个 function, 传入被装饰函数执行的结果,返回一个布尔值来决定是否可以缓存。 18 | 19 | ``` 20 | @cache.cached(should_cache_fn=lambda value: value is not None) 21 | def incr(by): 22 | ... 
23 | ``` 24 | 这个例子中的 `should_cache_fn` 接受的函数就表示,除了 `None` 外都缓存。 25 | 26 | 也可以很方便的禁用缓存,以便在开发阶段排除是否是缓存引起的 bug 27 | 28 | ``` 29 | @cache.cached(should_cache_fn=lambda _: False) 30 | def incr(by): 31 | ... 32 | ``` 33 | 34 | `Tache.batch` 不支持此用法。 35 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # tache documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Jul 19 18:12:00 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another directory, 16 | # add these directories to sys.path here. If the directory is relative to the 17 | # documentation root, use os.path.abspath to make it absolute, like shown here. 18 | # 19 | # import os 20 | # import sys 21 | # sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | # 27 | # needs_sphinx = '1.0' 28 | 29 | # Add any Sphinx extension module names here, as strings. They can be 30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 31 | # ones. 32 | 33 | from recommonmark.parser import CommonMarkParser 34 | from recommonmark.transform import AutoStructify 35 | 36 | 37 | extensions = [ 38 | 'sphinx.ext.autodoc', 39 | ] 40 | 41 | source_parsers = { 42 | '.md': CommonMarkParser, 43 | } 44 | 45 | # Add any paths that contain templates here, relative to this directory. 
46 | templates_path = ['_templates'] 47 | 48 | # The suffix(es) of source filenames. 49 | # You can specify multiple suffix as a list of string: 50 | # 51 | source_suffix = ['.rst', '.md'] 52 | 53 | # The encoding of source files. 54 | # 55 | # source_encoding = 'utf-8-sig' 56 | 57 | # The master toctree document. 58 | master_doc = 'index' 59 | 60 | # General information about the project. 61 | project = "tache" 62 | copyright = u"2017, wayhome" 63 | author = u'wayhome' 64 | 65 | # The version info for the project you're documenting, acts as replacement for 66 | # |version| and |release|, also used in various other places throughout the 67 | # built documents. 68 | # 69 | # The short X.Y version. 70 | version = '0.0.1' 71 | # The full version, including alpha/beta/rc tags. 72 | release = '0.0.1' 73 | 74 | # The language for content autogenerated by Sphinx. Refer to documentation 75 | # for a list of supported languages. 76 | # 77 | # This is also used if you do content translation via gettext catalogs. 78 | # Usually you set "language" from the command line for these cases. 79 | language = 'zh' 80 | 81 | # There are two options for replacing |today|: either, you set today to some 82 | # non-false value, then it is used: 83 | # 84 | # today = '' 85 | # 86 | # Else, today_fmt is used as the format for a strftime call. 87 | # 88 | # today_fmt = '%B %d, %Y' 89 | 90 | # List of patterns, relative to source directory, that match files and 91 | # directories to ignore when looking for source files. 92 | # This patterns also effect to html_static_path and html_extra_path 93 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 94 | 95 | # The reST default role (used for this markup: `text`) to use for all 96 | # documents. 97 | # 98 | # default_role = None 99 | 100 | # If true, '()' will be appended to :func: etc. cross-reference text. 
101 | # 102 | # add_function_parentheses = True 103 | 104 | # If true, the current module name will be prepended to all description 105 | # unit titles (such as .. function::). 106 | # 107 | # add_module_names = True 108 | 109 | # If true, sectionauthor and moduleauthor directives will be shown in the 110 | # output. They are ignored by default. 111 | # 112 | # show_authors = False 113 | 114 | # The name of the Pygments (syntax highlighting) style to use. 115 | pygments_style = 'sphinx' 116 | 117 | # A list of ignored prefixes for module index sorting. 118 | # modindex_common_prefix = [] 119 | 120 | # If true, keep warnings as "system message" paragraphs in the built documents. 121 | # keep_warnings = False 122 | 123 | # If true, `todo` and `todoList` produce output, else they produce nothing. 124 | todo_include_todos = False 125 | 126 | 127 | # -- Options for HTML output ---------------------------------------------- 128 | 129 | # The theme to use for HTML and HTML Help pages. See the documentation for 130 | # a list of builtin themes. 131 | # 132 | html_theme = 'alabaster' 133 | 134 | # Theme options are theme-specific and customize the look and feel of a theme 135 | # further. For a list of options available for each theme, see the 136 | # documentation. 137 | # 138 | # html_theme_options = {} 139 | 140 | html_theme_options = { 141 | 'show_related': True, 142 | } 143 | 144 | # Add any paths that contain custom themes here, relative to this directory. 145 | # html_theme_path = [] 146 | 147 | # The name for this set of Sphinx documents. 148 | # " v documentation" by default. 149 | # 150 | # html_title = u'xxxx v0.1.0' 151 | 152 | # A shorter title for the navigation bar. Default is the same as html_title. 153 | # 154 | # html_short_title = None 155 | 156 | # The name of an image file (relative to this directory) to place at the top 157 | # of the sidebar. 
158 | # 159 | # html_logo = None 160 | 161 | # The name of an image file (relative to this directory) to use as a favicon of 162 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 163 | # pixels large. 164 | # 165 | # html_favicon = None 166 | 167 | # Add any paths that contain custom static files (such as style sheets) here, 168 | # relative to this directory. They are copied after the builtin static files, 169 | # so a file named "default.css" will overwrite the builtin "default.css". 170 | html_static_path = ['_static'] 171 | 172 | # Add any extra paths that contain custom files (such as robots.txt or 173 | # .htaccess) here, relative to this directory. These files are copied 174 | # directly to the root of the documentation. 175 | # 176 | # html_extra_path = [] 177 | 178 | # If not None, a 'Last updated on:' timestamp is inserted at every page 179 | # bottom, using the given strftime format. 180 | # The empty string is equivalent to '%b %d, %Y'. 181 | # 182 | # html_last_updated_fmt = None 183 | 184 | # If true, SmartyPants will be used to convert quotes and dashes to 185 | # typographically correct entities. 186 | # 187 | # html_use_smartypants = True 188 | 189 | # Custom sidebar templates, maps document names to template names. 190 | # 191 | # html_sidebars = {} 192 | 193 | # Additional templates that should be rendered to pages, maps page names to 194 | # template names. 195 | # 196 | # html_additional_pages = {} 197 | 198 | # If false, no module index is generated. 199 | # 200 | # html_domain_indices = True 201 | 202 | # If false, no index is generated. 203 | # 204 | # html_use_index = True 205 | 206 | # If true, the index is split into individual pages for each letter. 207 | # 208 | # html_split_index = False 209 | 210 | # If true, links to the reST sources are added to the pages. 211 | # 212 | # html_show_sourcelink = True 213 | 214 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
215 | # 216 | # html_show_sphinx = True 217 | 218 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 219 | # 220 | # html_show_copyright = True 221 | 222 | # If true, an OpenSearch description file will be output, and all pages will 223 | # contain a tag referring to it. The value of this option must be the 224 | # base URL from which the finished HTML is served. 225 | # 226 | # html_use_opensearch = '' 227 | 228 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 229 | # html_file_suffix = None 230 | 231 | # Language to be used for generating the HTML full-text search index. 232 | # Sphinx supports the following languages: 233 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 234 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' 235 | # 236 | # html_search_language = 'en' 237 | 238 | # A dictionary with options for the search language support, empty by default. 239 | # 'ja' uses this config value. 240 | # 'zh' user can custom change `jieba` dictionary path. 241 | # 242 | # html_search_options = {'type': 'default'} 243 | 244 | # The name of a javascript file (relative to the configuration directory) that 245 | # implements a search results scorer. If empty, the default will be used. 246 | # 247 | # html_search_scorer = 'scorer.js' 248 | 249 | # Output file base name for HTML help builder. 250 | htmlhelp_basename = 'tachedoc' 251 | 252 | # -- Options for LaTeX output --------------------------------------------- 253 | 254 | latex_elements = { 255 | # The paper size ('letterpaper' or 'a4paper'). 256 | # 257 | # 'papersize': 'letterpaper', 258 | 259 | # The font size ('10pt', '11pt' or '12pt'). 260 | # 261 | # 'pointsize': '10pt', 262 | 263 | # Additional stuff for the LaTeX preamble. 264 | # 265 | # 'preamble': '', 266 | 267 | # Latex figure (float) alignment 268 | # 269 | # 'figure_align': 'htbp', 270 | } 271 | 272 | # Grouping the document tree into LaTeX files. 
List of tuples 273 | # (source start file, target name, title, 274 | # author, documentclass [howto, manual, or own class]). 275 | latex_documents = [ 276 | (master_doc, 'tache.tex', u'tache Documentation', 277 | u'wayhome', 'manual'), 278 | ] 279 | 280 | # The name of an image file (relative to this directory) to place at the top of 281 | # the title page. 282 | # 283 | # latex_logo = None 284 | 285 | # For "manual" documents, if this is true, then toplevel headings are parts, 286 | # not chapters. 287 | # 288 | # latex_use_parts = False 289 | 290 | # If true, show page references after internal links. 291 | # 292 | # latex_show_pagerefs = False 293 | 294 | # If true, show URL addresses after external links. 295 | # 296 | # latex_show_urls = False 297 | 298 | # Documents to append as an appendix to all manuals. 299 | # 300 | # latex_appendices = [] 301 | 302 | # It false, will not define \strong, \code, itleref, \crossref ... but only 303 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added 304 | # packages. 305 | # 306 | # latex_keep_old_macro_names = True 307 | 308 | # If false, no module index is generated. 309 | # 310 | # latex_domain_indices = True 311 | 312 | 313 | # -- Options for manual page output --------------------------------------- 314 | 315 | # One entry per manual page. List of tuples 316 | # (source start file, name, description, authors, manual section). 317 | man_pages = [ 318 | (master_doc, 'tache', u'tache Documentation', 319 | [author], 1) 320 | ] 321 | 322 | # If true, show URL addresses after external links. 323 | # 324 | # man_show_urls = False 325 | 326 | 327 | # -- Options for Texinfo output ------------------------------------------- 328 | 329 | # Grouping the document tree into Texinfo files. 
List of tuples 330 | # (source start file, target name, title, author, 331 | # dir menu entry, description, category) 332 | texinfo_documents = [ 333 | (master_doc, 'tache', u'tache Documentation', 334 | author, 'tache', 'One line description of project.', 335 | 'Miscellaneous'), 336 | ] 337 | 338 | # Documents to append as an appendix to all manuals. 339 | # 340 | # texinfo_appendices = [] 341 | 342 | # If false, no module index is generated. 343 | # 344 | # texinfo_domain_indices = True 345 | 346 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 347 | # 348 | # texinfo_show_urls = 'footnote' 349 | 350 | # If true, do not generate a @detailmenu in the "Top" node's menu. 351 | # 352 | # texinfo_no_detailmenu = False 353 | 354 | # At the bottom of conf.py 355 | 356 | 357 | def setup(app): 358 | app.add_config_value('recommonmark_config', { 359 | 'url_resolver': lambda url: 'https://zhihu.github.io/' + url, 360 | 'enable_auto_toc_tree': True, 361 | 'auto_toc_tree_section': 'Contents', 362 | }, True) 363 | app.add_transform(AutoStructify) 364 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | ../README.md -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | # Installation / Usage 2 | 3 | To install use pip: 4 | 5 | $ pip install tache 6 | -------------------------------------------------------------------------------- /docs/use_kwargs.md: -------------------------------------------------------------------------------- 1 | # 使用关键字参数 2 | 3 | 在同时使用位置参数和关键字参数时,生成的 key 很难在较小的代价下保证一致。如对下面这个函数: 4 | 5 | ``` 6 | def f(a, b=None): 7 | pass 8 | ``` 9 | 10 | 在一些缓存库中,使用 `f(2, 3)` 和 `f(a=2, b=3)` 这两个等价的函数调用会生成两个 key, 11 | 需要分别进行缓存和失效。 12 | 13 | 基于最小惊讶的原则,我们希望同一函数在所有使用的地方都按照一致的方式调用。默认我们使用的是位置参数, 14 | 
因为它的使用方式单一,顺序确定,生成的 key 比较短。开源的较出名的缓存库,如 beaker,dogpile 等也都是 15 | 默认使用位置参数。 16 | 17 | 18 | ## 基本用法 19 | 20 | 虽然希望整体风格都能统一,但如果出于个人的偏爱,更喜欢用关键字参数,这里也给予了支持,需要做的是替换 21 | cache 装饰器的 key_func 函数。 22 | 23 | ``` 24 | from tache.utils import kwargs_key_generator 25 | 26 | @cache.cached(key_func=kwargs_key_generator) 27 | def add(a, b): 28 | return a + b 29 | ``` 30 | 31 | 使用了上述 key_func 后,`add` 函数便只能使用关键字参数来调用了。 失效也要用关键字参数失效: 32 | 33 | ``` 34 | add.invalidate(a=x, b=x) 35 | ``` 36 | 37 | 包括刷新缓存: 38 | 39 | ``` 40 | add.refresh(a=x, b=x) 41 | ``` 42 | 43 | ## tag 用法的改变 44 | 45 | 因为不再传入位置参数,现在 tag 的用法也要相应改变。 46 | 47 | 单个 tag: 48 | 49 | ``` 50 | @cache.cached(key_func=kwargs_key_generator, tags=["a:{a}"]) 51 | def add(a, b): 52 | return a + b 53 | ``` 54 | 55 | 多 tag: 56 | 57 | ``` 58 | @cache.cached(key_func=kwargs_key_generator, tags=["a:{a}", "b:{b}"]) 59 | def add(a, b): 60 | return a + b 61 | ``` 62 | 63 | 基于函数的 tag: 64 | 65 | ``` 66 | @cache.cached(key_func=kwargs_key_generator, tags=[lambda *args, **kwargs: "add:{0}".format(kwargs['a'] + kwargs['b'])]) 67 | def add(a, b): 68 | return a + b 69 | ``` 70 | 71 | ## 使用 batch ? 
from setuptools import setup, find_packages
from codecs import open
from os import path

__version__ = '0.2.1'

here = path.abspath(path.dirname(__file__))


def _parse_requirements(text):
    """Return requirement specifiers from requirements-file text.

    Blank lines and comment lines (starting with ``#``) are skipped so
    they are never handed to setuptools as bogus requirements — the old
    code let the leading ``# Dev/Deployment`` comment leak into
    ``install_requires``.
    """
    lines = (line.strip() for line in text.splitlines())
    return [line for line in lines if line and not line.startswith('#')]


# Get the long description from the README file.  An explicit encoding
# keeps the build independent of the locale's default (the project docs
# contain non-ASCII text).
with open(path.join(here, 'README.md'), 'r', encoding='utf-8') as f:
    long_description = f.read()

# Get the dependencies and installs.
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
    install_requires = _parse_requirements(f.read())

setup(
    name='tache',
    version=__version__,
    description='A tag based invalidation caching library',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/zhihu/tache',
    license='MIT',
    classifiers=[
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License'
    ],
    keywords='',
    packages=find_packages(exclude=['docs', 'tests*']),
    include_package_data=True,
    author='wayhome',
    install_requires=install_requires,
    author_email='y@zhihu.com'
)
class BaseBackend(object):
    """Abstract cache backend contract.

    Concrete backends must provide single-key get/set/delete plus the
    bulk mget/mset operations used by the batch decorator.
    """

    def get(self, cache_key):
        """Return the cached value for *cache_key* (NO_VALUE on miss)."""
        raise NotImplementedError()

    def set(self, cache_key, data, timeout):
        """Store *data* under *cache_key* for *timeout* seconds."""
        raise NotImplementedError()

    def delete(self, cache_key):
        """Remove *cache_key* from the cache."""
        raise NotImplementedError()

    def mget(self, cache_keys):
        """Return one value per key in *cache_keys* (NO_VALUE on miss)."""
        raise NotImplementedError()

    def mset(self, mapping, timeout):
        """Store every key/value pair in *mapping* for *timeout* seconds."""
        raise NotImplementedError()


class RedisBackend(BaseBackend):
    """Cache backend on top of a redis connection.

    Values go through :class:`Serializer` (JSON by default) so arbitrary
    serializable results can be cached.
    """

    def __init__(self, conn, format="JSON"):
        self.conn = conn
        self.serializer = Serializer(format=format)

    def get(self, cache_key):
        """Return the decoded value, or NO_VALUE on a cache miss."""
        raw = self.conn.get(cache_key)
        return NO_VALUE if raw is None else self.serializer.decode(raw)

    def set(self, cache_key, data, timeout):
        """Encode and store *data* under *cache_key*.

        A ``None`` result is cached only briefly — 10% of the requested
        timeout, clamped to [1, 300] seconds — so "null" answers do not
        shadow real data for long (see docs/cache_null_and_miss.md).
        """
        if data is None:
            timeout = int(max(min(300, 0.1 * timeout), 1))
        self.conn.setex(cache_key, timeout, self.serializer.encode(data))

    def delete(self, *cache_keys):
        """Drop one or more keys in a single redis call."""
        self.conn.delete(*cache_keys)

    def mget(self, cache_keys):
        """Bulk get; hits are decoded, misses come back as NO_VALUE."""
        raw_values = self.conn.mget(cache_keys)
        return [self.serializer.decode(raw) if raw else NO_VALUE
                for raw in raw_values]

    def mset(self, mapping, timeout):
        """Bulk set via a non-transactional pipeline (one round trip)."""
        pipe = self.conn.pipeline(transaction=False)
        for cache_key, value in mapping.items():
            pipe.setex(cache_key, timeout, self.serializer.encode(value))
        pipe.execute()
return [] 42 | cache_keys = self._keys_func(self._namespace, self._func, *args) 43 | key_lookup = dict(zip(args, cache_keys)) 44 | mapping = dict(zip(args, self._backend.mget(cache_keys))) 45 | miss_args = [] 46 | for arg, value in mapping.items(): 47 | if value is NO_VALUE: 48 | miss_args.append(arg) 49 | if miss_args: 50 | miss_mapping = dict(zip(miss_args, self._func(*[arg for arg in miss_args]))) 51 | miss_cache_mapping = dict((key_lookup[arg], v) for (arg, v) in miss_mapping.items()) 52 | self._backend.mset(miss_cache_mapping, self._timeout) 53 | mapping.update(miss_mapping) 54 | return [mapping[arg] for arg in args] 55 | 56 | def invalidate(self, *args): 57 | cache_keys = self._keys_func(self._namespace, self._func, *args) 58 | self._backend.delete(*cache_keys) 59 | -------------------------------------------------------------------------------- /tache/cached.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import functools 4 | import types 5 | 6 | from .utils import tag_key_generator, NO_VALUE 7 | 8 | 9 | class Cached(object): 10 | 11 | def __init__(self, func, backend, key_func, timeout, 12 | namespace, tags, should_cache_fn, tag_prefix): 13 | self._func = func 14 | self._backend = backend 15 | self._key_func = key_func 16 | self._timeout = timeout 17 | self._tags = tags 18 | self._namespace = namespace 19 | self._should_cache_fn = should_cache_fn 20 | self._tag_prefix = tag_prefix 21 | if isinstance(self._func, (classmethod, staticmethod)): 22 | functools.update_wrapper(self, self._func.__func__) 23 | else: 24 | functools.update_wrapper(self, self._func) 25 | 26 | def __get__(self, instance, owner): 27 | wrapped_self = object.__new__(self.__class__) 28 | wrapped_self.__dict__ = self.__dict__.copy() 29 | if instance is None: 30 | if not hasattr(self._func, "__call__"): 31 | wrapped_self._func = self._func.__get__(None, owner) 32 | return wrapped_self 33 | if not 
isinstance(self._func, types.MethodType): 34 | wrapped_self._func = self._func.__get__(instance, owner) 35 | return wrapped_self 36 | 37 | def __call__(self, *args, **kwargs): 38 | cache_key = self._key_func(self._namespace, self._func, *args, **kwargs) 39 | if self._tags: 40 | cache_key = tag_key_generator(self._backend, cache_key, self._tag_prefix, 41 | self._tags, self._timeout, *args, **kwargs) 42 | result = self._backend.get(cache_key) 43 | if result is NO_VALUE: 44 | result = self._func(*args, **kwargs) 45 | if self._should_cache_fn(result): 46 | self._backend.set(cache_key, result, self._timeout) 47 | return result 48 | 49 | def invalidate(self, *args, **kwargs): 50 | cache_key = self._key_func(self._namespace, self._func, *args, **kwargs) 51 | if self._tags: 52 | cache_key = tag_key_generator(self._backend, cache_key, self._tag_prefix, 53 | self._tags, self._timeout, *args, **kwargs) 54 | self._backend.delete(cache_key) 55 | 56 | def invalidate_tag(self, tag): 57 | key = self._tag_prefix + tag 58 | self._backend.delete(key) 59 | 60 | def nocache(self, *args, **kwargs): 61 | """ 62 | directly call, without cache 63 | """ 64 | return self._func(*args, **kwargs) 65 | 66 | def refresh(self, *args, **kwargs): 67 | cache_key = self._key_func(self._namespace, self._func, *args, **kwargs) 68 | result = self._func(*args, **kwargs) 69 | if self._should_cache_fn(result): 70 | self._backend.set(cache_key, result, self._timeout) 71 | return result 72 | -------------------------------------------------------------------------------- /tache/serializer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | 提供主要用于数据交换的序列化处理机制。 5 | """ 6 | 7 | __all__ = ['Serializer'] 8 | 9 | import datetime 10 | import decimal 11 | import json 12 | import logging 13 | 14 | try: 15 | import cPickle as pickle 16 | except ImportError: 17 | import pickle 18 | logging.warning("can't import cpickle, use pickle instead") 19 
class Serializer(object):
    """Serialization helper used for cache data interchange.

    A thin facade that dispatches to the module-level ``_to_<fmt>`` /
    ``_from_<fmt>`` functions for the configured format.
    """

    #: Formats with a ``_to_*``/``_from_*`` implementation in this module.
    SUPPORTED_FORMATS = ['YAML', 'JSON', 'PICKLE']

    def __init__(self, format='JSON'):
        """Create a serializer for *format* (case-insensitive).

        :param str format: one of YAML, JSON or PICKLE.
        :raises ValueError: if *format* is not supported.
        """
        format = format.upper()
        if format not in self.SUPPORTED_FORMATS:
            # The old message misspelled "serialization" and did not say
            # which format was rejected.
            raise ValueError(
                'unsupported serialization format: {0!r}'.format(format))
        self.format = format

    def load(self, stream):
        """Deserialize *stream* (a string or file-like object)."""
        func = globals()['_from_' + self.format.lower()]
        return func(stream)

    def dump(self, data):
        """Serialize *data* to a string in the configured format."""
        func = globals()['_to_' + self.format.lower()]
        return func(data)

    def serialize(self, data):
        """Alias of :meth:`dump`."""
        return self.dump(data)

    def unserialize(self, stream):
        """Alias of :meth:`load`."""
        return self.load(stream)

    def encode(self, data):
        """Alias of :meth:`dump`."""
        return self.dump(data)

    def decode(self, stream):
        """Alias of :meth:`load`."""
        return self.load(stream)
def _from_yaml(stream):
    """Load data from a YAML file or string."""
    from yaml import load
    try:
        from yaml import CLoader as Loader
    except ImportError:
        from yaml import Loader
    return load(stream, Loader=Loader)


def _to_yaml(data):
    """Dump data into a YAML string."""
    from yaml import dump
    try:
        from yaml import CDumper as Dumper
    except ImportError:
        from yaml import Dumper
    return dump(data, Dumper=Dumper)


def _from_pickle(stream):
    """Load data from a PICKLE file or string."""
    loader = pickle.loads if isinstance(stream, basestring) else pickle.load
    return loader(stream)


def _to_pickle(data):
    """Dump data into a PICKLE string."""
    return pickle.dumps(data)


def _from_json(stream):
    """Load data from a JSON file or string.

    Objects decode as ObjectDict so values support attribute access as
    well as item access.
    """
    hook = lambda d: ObjectDict(d)
    if isinstance(stream, basestring):
        return json.loads(stream, object_hook=hook)
    return json.load(stream, object_hook=hook)


def _to_json(data):
    """Dump data into a JSON string."""
    return json.dumps(data, cls=AwareJSONEncoder)


class AwareJSONEncoder(json.JSONEncoder):
    """JSONEncoder subclass that knows how to encode date/time and
    decimal types, and also ResultProxy/RowProxy of SQLAlchemy.
    """

    DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"

    def default(self, o):
        # datetime must be tested before date: datetime is a date subclass.
        if isinstance(o, datetime.datetime):
            combined = "%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT)
            return o.strftime(combined)
        if isinstance(o, datetime.date):
            return o.strftime(self.DATE_FORMAT)
        if isinstance(o, datetime.time):
            return o.strftime(self.TIME_FORMAT)
        if isinstance(o, decimal.Decimal):
            return str(o)
        if isinstance(o, ResultProxy):
            return list(o)
        if isinstance(o, RowProxy):
            return dict(o)
        if isinstance(o.__class__, DeclarativeMeta):
            # Declarative model: expose its non-private instance fields.
            return {name: value for name, value in o.__dict__.items()
                    if not name.startswith('_')}
        return super(AwareJSONEncoder, self).default(o)
def _encode_object(o):
    """Recursively encode date/time and decimal types, and also
    ResultProxy/RowProxy of SQLAlchemy, into plain JSON-friendly values.

    Anything unrecognized is returned unchanged.
    """
    DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"

    if type(o) in (list, tuple):
        return [_encode_object(i) for i in o]
    elif o is None or type(o) in (int, float, str, bytes, bool, dict):
        # Plain scalars and dicts pass through untouched.  The previous
        # check referenced the Python 2-only names ``long``/``unicode``
        # (a NameError on Python 3 for ANY input) and listed ``None``
        # instead of ``type(None)``; on Python 2 those extra types fell
        # through to the final ``return o`` anyway, so behaviour there
        # is unchanged.
        return o
    elif isinstance(o, datetime.datetime):
        return o.strftime("%s %s" % (DATE_FORMAT, TIME_FORMAT))
    elif isinstance(o, datetime.date):
        return o.strftime(DATE_FORMAT)
    elif isinstance(o, datetime.time):
        return o.strftime(TIME_FORMAT)
    elif isinstance(o, decimal.Decimal):
        return str(o)
    elif isinstance(o, ResultProxy):
        return _encode_object(list(o))
    elif isinstance(o, RowProxy):
        return dict(o)
    elif isinstance(o.__class__, DeclarativeMeta):
        fields = {}
        for field, value in o.__dict__.items():
            if not field.startswith('_'):
                fields[field] = value
        return fields
    else:
        return o
class BaseConverter(object):
    """
    Convert numbers from base 10 integers to base X strings and back again.

    Sample usage:

    >>> base20 = BaseConverter('0123456789abcdefghij')
    >>> base20.from_decimal(1234)
    '31e'
    >>> base20.to_decimal('31e')
    1234
    """
    decimal_digits = "0123456789"

    def __init__(self, digits):
        self.digits = digits

    def from_decimal(self, i):
        """Render base-10 integer *i* in this converter's alphabet."""
        return self.convert(i, self.decimal_digits, self.digits)

    def to_decimal(self, s):
        """Parse string *s* (in this alphabet) back to a base-10 int."""
        return int(self.convert(s, self.digits, self.decimal_digits))

    @staticmethod
    def convert(number, fromdigits, todigits):
        """Re-encode *number* from alphabet *fromdigits* to *todigits*.

        Based on http://code.activestate.com/recipes/111286/

        Uses integer floor division throughout: the old
        ``int(x / len(todigits))`` is *true* division on Python 3, which
        silently round-trips through float and corrupts values above
        2**53 (flake ids already occupy ~2**48 bits and keep growing).
        """
        text = str(number)
        neg = text.startswith('-')
        if neg:
            text = text[1:]

        # Make an integer out of the number.
        x = 0
        base_in = len(fromdigits)
        for digit in text:
            x = x * base_in + fromdigits.index(digit)

        # Create the result in base len(todigits).
        if x == 0:
            res = todigits[0]
        else:
            res = ""
            base_out = len(todigits)
            while x > 0:
                x, rem = divmod(x, base_out)
                res = todigits[rem] + res
        if neg:
            res = '-' + res
        return res


base62 = BaseConverter(
    'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyz'
)


def short_id():
    """Return a short, roughly time-ordered unique id in base62."""
    flake = simpleflake()
    return base62.from_decimal(flake)


if __name__ == "__main__":
    print(short_id(), len(short_id()))
def key_for_fn(namespace, fn):
    """Build the base cache key ``[namespace:]module.[Class.]name``.

    Handles plain functions, bound methods and classmethods on both
    Python 2 (``im_class``/``im_self``) and Python 3 (``__self__``)
    without requiring ``six``.
    """
    classname = None
    im_class = getattr(fn, 'im_class', None)
    if im_class is not None:
        # Python 2 bound/unbound method.
        classname = im_class.__name__
        if classname == 'type':
            # Python 2 classmethod: im_self is the class itself.
            classname = fn.im_self.__name__
    else:
        bound_to = getattr(fn, '__self__', None)
        if bound_to is not None:
            # Python 3 bound method.
            classname = bound_to.__class__.__name__
            if classname == 'type':
                # Python 3 classmethod: __self__ is the class itself.
                classname = bound_to.__name__
    if classname:
        key = "{0}.{1}.{2}".format(fn.__module__, classname, fn.__name__)
    else:
        key = "{0}.{1}".format(fn.__module__, fn.__name__)
    if namespace is None:
        return key
    return '{0}:{1}'.format(namespace, key)


try:
    import six as _six

    def _ensure_text(value):
        """Coerce text arguments to the native ``str`` type.

        Fixes the PY2 UnicodeEncodeError for Chinese keys (see
        CHANGES 0.2.1); on Python 3 this is an identity for ``str``.
        """
        if isinstance(value, _six.string_types):
            return _six.ensure_str(value)
        return value
except ImportError:  # pragma: no cover - environments without six (py3)
    def _ensure_text(value):
        return value


def arguments_key_generator(namespace, fn, *args, **kwargs):
    """Default key generator: builds the key from positional arguments.

    :raises ValueError: when keyword arguments are used — they cannot be
        mapped onto a stable key cheaply (see docs/use_kwargs.md).
    """
    key = key_for_fn(namespace, fn)
    if kwargs:
        # The old message read "tcache's default key_funcfunction ..." —
        # misspelled project name plus a missing space between the
        # concatenated string literals.
        raise ValueError(
            "tache's default key_func "
            "function does not accept keyword arguments.")
    args = [_ensure_text(arg) for arg in args]
    return key + "|" + "-".join(map(str, args))


def kwargs_key_generator(namespace, fn, *args, **kwargs):
    """Key generator for the keyword-argument call style.

    Items are sorted by argument name so the key is call-order
    independent.

    :raises ValueError: when positional arguments are used.
    """
    key = key_for_fn(namespace, fn)
    if args:
        raise ValueError(
            "kwargs key generator does not accept positional arguments")

    items = sorted(kwargs.items(), key=lambda item: item[0])
    return key + "|" + ','.join(map(str, items))


def arguments_batch_keys_generator(namespace, fn, *args):
    """Return one cache key per positional argument (batch decorator)."""
    key = key_for_fn(namespace, fn)
    return [key + "|" + str(arg) for arg in args]


def tag_key_generator(backend, prefix, tag_prefix, tags, timeout, *args, **kwargs):
    """Decorate *prefix* with one generated id per tag.

    Each tag (a format template or a callable over the call arguments)
    maps to a backend key holding a random short id; invalidating that
    tag key rotates the id, orphaning every cache entry built with it.
    Missing tag keys are (re)initialised with *timeout* TTL.
    """
    src_keys = []
    for t in tags:
        if callable(t):
            tag = str(t(*args, **kwargs))
        else:
            tag = str(t.format(*args, **kwargs))
        src_keys.append(tag_prefix + tag)
    dst_keys = backend.mget(src_keys)
    for idx, dst_key in enumerate(dst_keys):
        if dst_key is NO_VALUE:
            tag_key = short_id()
            backend.set(src_keys[idx], tag_key, timeout)
            dst_keys[idx] = tag_key
    return prefix + "|" + "-".join(map(str, dst_keys))
78 | """ 79 | __slots__ = tuple() 80 | 81 | def __repr__(self): 82 | return 'NoValue' 83 | 84 | 85 | NO_VALUE = NoValue() 86 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zhihu/tache/e6f358bf01421dc97c067f9bd3a9bc4f1dd0842e/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_cache_key.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import random 4 | from tache.utils import (arguments_key_generator, kwargs_key_generator, 5 | arguments_batch_keys_generator) 6 | 7 | 8 | def add(a, b): 9 | return a + b + random.randint(0, 100) 10 | 11 | 12 | class A: 13 | def plus(self, a, b): 14 | return a + b + random.randint(0, 100) 15 | 16 | 17 | def test_function_key(): 18 | key = arguments_key_generator("prefix", add, 5, 6) 19 | assert key == "prefix:tests.test_cache_key.add|5-6" 20 | key = arguments_key_generator("prefix", add, 5, u"测试") 21 | assert key == "prefix:tests.test_cache_key.add|5-测试" 22 | key = arguments_key_generator(None, A().plus, 5, 6) 23 | assert key == "tests.test_cache_key.A.plus|5-6" 24 | 25 | 26 | def test_kwargs_key(): 27 | key = kwargs_key_generator("prefix", add, a=5, b=6) 28 | assert key == "prefix:tests.test_cache_key.add|('a', 5),('b', 6)" 29 | key = kwargs_key_generator(None, add, a=5, b=6) 30 | assert key == "tests.test_cache_key.add|('a', 5),('b', 6)" 31 | 32 | 33 | def test_batch_key(): 34 | keys = arguments_batch_keys_generator("prefix", add, 5, 6) 35 | assert keys == ['prefix:tests.test_cache_key.add|5', 'prefix:tests.test_cache_key.add|6'] 36 | -------------------------------------------------------------------------------- /tests/test_redis_batch.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import random 4 | import fakeredis 5 | from tache import RedisCache 6 | 7 | 8 | def test_batch_function(): 9 | r = fakeredis.FakeStrictRedis() 10 | r.flushall() 11 | cache = RedisCache(conn=r) 12 | 13 | class A(object): 14 | 15 | def __init__(self): 16 | self.count = 0 17 | 18 | @cache.batch() 19 | def list(self, *ids): 20 | for _id in ids: 21 | self.count += 1 22 | return [2*_id for _id in ids] 23 | 24 | a = A() 25 | assert a.list(1,2,3,4,5) == [2,4,6,8,10] 26 | assert a.count ==5 27 | assert a.list(1, 2) == [2, 4] 28 | assert a.count ==5 29 | assert a.list(5, 6, 7) == [10, 12, 14] 30 | assert a.count ==7 31 | assert a.list() == [] 32 | assert a.count == 7 33 | a.list.invalidate(5, 7) 34 | assert a.list(1, 2, 5, 6, 7) == [2, 4, 10, 12, 14] 35 | assert a.count ==9 36 | 37 | 38 | def test_batch_classmethod(): 39 | r = fakeredis.FakeStrictRedis() 40 | r.flushall() 41 | cache = RedisCache(conn=r) 42 | 43 | class AB(object): 44 | 45 | count = 0 46 | 47 | @cache.batch() 48 | @classmethod 49 | def list(cls, *ids): 50 | result = [] 51 | for i in ids: 52 | result.append(i + random.randint(1, 100)) 53 | return result 54 | 55 | @cache.batch() 56 | @classmethod 57 | def list2(cls, *ids): 58 | cls.__name__ = 'ABC' 59 | result = [] 60 | for i in ids: 61 | result.append(i + random.randint(1, 100)) 62 | return result 63 | 64 | assert AB.list(3, 4) == AB.list(3, 4) == AB().list(3, 4) 65 | assert AB.list2(3, 4) != AB.list2(3, 4) 66 | 67 | 68 | def test_batch_staticmethod(): 69 | r = fakeredis.FakeStrictRedis() 70 | r.flushall() 71 | cache = RedisCache(conn=r) 72 | 73 | class ABS(object): 74 | 75 | count = 0 76 | 77 | @cache.batch() 78 | @staticmethod 79 | def list(*ids): 80 | result = [] 81 | for i in ids: 82 | result.append(i + random.randint(1, 100)) 83 | return result 84 | 85 | assert ABS.list(3, 4) == ABS.list(3, 4) == ABS().list(3, 4) 86 | 
-------------------------------------------------------------------------------- /tests/test_redis_cache.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import random 4 | 5 | import fakeredis 6 | from tache import RedisCache 7 | from tache.utils import kwargs_key_generator 8 | 9 | 10 | def test_cache_function(): 11 | r = fakeredis.FakeStrictRedis() 12 | r.flushall() 13 | cache = RedisCache(conn=r) 14 | 15 | class A: 16 | 17 | def __init__(self): 18 | self.count = 0 19 | 20 | @cache.cached() 21 | def add(self, a, b): 22 | self.count += 1 23 | return a + b + self.count 24 | 25 | class B: 26 | 27 | def __init__(self): 28 | self.count = 0 29 | 30 | @cache.cached('tests.test_redis_cache.A.add|{0}-{1}') 31 | def add_explicit(self, a, b): 32 | self.count += 1 33 | return a + b + self.count 34 | 35 | a = A() 36 | assert a.add(5, 6) == 12 37 | assert a.add(5, 6) == 12 38 | assert a.count == 1 39 | a.add.invalidate(5, 6) 40 | assert a.add(5, 6) == 13 41 | assert a.count == 2 42 | assert a.add.refresh(5, 6) == 14 43 | assert a.count == 3 44 | assert a.add(5, 6) == 14 45 | b = B() 46 | assert b.add_explicit(5, 6) == 14 47 | 48 | 49 | def test_cache_kwargs(): 50 | r = fakeredis.FakeStrictRedis() 51 | r.flushall() 52 | cache = RedisCache(conn=r) 53 | 54 | class A(object): 55 | 56 | def __init__(self): 57 | self.count = 0 58 | 59 | @cache.cached(key_func=kwargs_key_generator) 60 | def add(self, a, b): 61 | self.count += 1 62 | return a + b + self.count 63 | 64 | class B(object): 65 | 66 | def __init__(self): 67 | self.count = 0 68 | 69 | @cache.cached("tests.test_redis_cache.A.add|('a', {a}),('b', {b})") 70 | def add_explicit(self, a, b): 71 | self.count += 1 72 | return a + b + self.count 73 | 74 | a = A() 75 | assert a.add(a=5, b=6) == 12 76 | assert a.add(a=5, b=6) == 12 77 | assert a.count == 1 78 | a.add.invalidate(a=5, b=6) 79 | assert a.add(a=5, b=6) == 13 80 | assert a.count == 
2 81 | assert a.add.refresh(a=5, b=6) == 14 82 | assert a.count == 3 83 | assert a.add(a=5, b=6) == 14 84 | b = B() 85 | assert b.add_explicit(a=5, b=6) == 14 86 | 87 | 88 | def test_cache_None(): 89 | r = fakeredis.FakeStrictRedis() 90 | r.flushall() 91 | cache = RedisCache(conn=r) 92 | 93 | global i 94 | i = 0 95 | 96 | @cache.cached() 97 | def incr(): 98 | global i 99 | i += 1 100 | 101 | incr() 102 | assert i == 1 103 | incr() 104 | assert i == 1 105 | incr.invalidate() 106 | incr() 107 | assert i == 2 108 | 109 | 110 | def test_not_cache_None(): 111 | r = fakeredis.FakeStrictRedis() 112 | r.flushall() 113 | cache = RedisCache(conn=r) 114 | 115 | global i 116 | i = 0 117 | 118 | @cache.cached(should_cache_fn=lambda value: value is not None) 119 | def incr(by): 120 | global i 121 | i += 1 122 | return by 123 | 124 | incr(None) 125 | incr(None) 126 | assert i == 2 127 | incr(1) 128 | incr(1) 129 | assert i == 3 130 | 131 | 132 | def test_cache_classmethod(): 133 | r = fakeredis.FakeStrictRedis() 134 | r.flushall() 135 | cache = RedisCache(conn=r) 136 | 137 | class AC(object): 138 | 139 | count = 0 140 | 141 | @cache.cached() 142 | @classmethod 143 | def add(cls, a, b): 144 | return a + b + random.randint(1, 100) 145 | 146 | @cache.cached() 147 | @classmethod 148 | def plus(cls, a, b): 149 | cls.__name__ = 'AD' 150 | return a + b + random.randint(1, 100) 151 | 152 | assert AC.add(3, 4) == AC.add(3, 4) == AC().add(3, 4) 153 | assert AC.plus(3, 4) != AC.plus(3, 4) 154 | 155 | 156 | def test_cache_staticmethod(): 157 | r = fakeredis.FakeStrictRedis() 158 | r.flushall() 159 | cache = RedisCache(conn=r) 160 | 161 | class AS(object): 162 | 163 | count = 0 164 | 165 | @cache.cached() 166 | @staticmethod 167 | def add(a, b): 168 | return a + b + random.randint(1, 100) 169 | 170 | assert AS.add(3, 4) == AS.add(3, 4) == AS().add(3, 4) 171 | -------------------------------------------------------------------------------- /tests/test_redis_tag.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import random 4 | 5 | import fakeredis 6 | from tache import RedisCache 7 | from tache.utils import kwargs_key_generator 8 | 9 | 10 | def test_tag_cache_function(): 11 | r = fakeredis.FakeStrictRedis() 12 | r.flushall() 13 | cache = RedisCache(conn=r) 14 | 15 | @cache.cached(tags=["add:{0}"]) 16 | def add(a, b): 17 | return a + b + random.randint(1, 10000) 18 | 19 | add_result1 = add(5, 6) 20 | add_result2 = add(5, 7) 21 | add_result3 = add(5, 8) 22 | add_result4 = add(6, 8) 23 | 24 | # cache 生效 25 | assert add(5, 6) == add_result1 26 | assert add(5, 7) == add_result2 27 | assert add(5, 8) == add_result3 28 | assert add(6, 8) == add_result4 29 | 30 | # 精确失效 31 | add.invalidate(5, 6) 32 | assert add(5, 6) != add_result1 33 | assert add(5, 7) == add_result2 34 | assert add(5, 8) == add_result3 35 | assert add(6, 8) == add_result4 36 | 37 | # 批量失效 38 | add.invalidate_tag("add:5") 39 | assert add(5, 7) != add_result2 40 | assert add(5, 8) != add_result3 41 | assert add(6, 8) == add_result4 42 | 43 | 44 | def test_tag_cache_kwargs_function(): 45 | r = fakeredis.FakeStrictRedis() 46 | r.flushall() 47 | cache = RedisCache(conn=r) 48 | 49 | @cache.cached(key_func=kwargs_key_generator, tags=["add:{a}"]) 50 | def add(a, b): 51 | return a + b + random.randint(1, 10000) 52 | 53 | add_result1 = add(a=5, b=6) 54 | add_result2 = add(a=5, b=7) 55 | add_result3 = add(a=5, b=8) 56 | add_result4 = add(a=6, b=8) 57 | 58 | # cache 生效 59 | assert add(a=5, b=6) == add_result1 60 | assert add(a=5, b=7) == add_result2 61 | assert add(a=5, b=8) == add_result3 62 | assert add(a=6, b=8) == add_result4 63 | 64 | # 精确失效 65 | add.invalidate(a=5, b=6) 66 | assert add(a=5, b=6) != add_result1 67 | assert add(a=5, b=7) == add_result2 68 | assert add(a=5, b=8) == add_result3 69 | assert add(a=6, b=8) == add_result4 70 | 71 | # 批量失效 72 | 
add.invalidate_tag("add:5") 73 | assert add(a=5, b=7) != add_result2 74 | assert add(a=5, b=8) != add_result3 75 | assert add(a=6, b=8) == add_result4 76 | 77 | 78 | def test_cache_method(): 79 | r = fakeredis.FakeStrictRedis() 80 | r.flushall() 81 | cache = RedisCache(conn=r) 82 | 83 | class A(object): 84 | @cache.cached(tags=["add:{0}"]) 85 | def add(self, a, b): 86 | return a + b + random.randint(1, 10000) 87 | 88 | @cache.cached(tags=["plus:{0}"]) 89 | @classmethod 90 | def plus(cls, a, b): 91 | return a + b + random.randint(1, 10000) 92 | 93 | plus_result1 = A.plus(5, 6) 94 | assert A.plus(5, 6) == plus_result1 95 | add_result1 = A().add(5, 6) 96 | assert A().add(5, 6) == add_result1 97 | 98 | 99 | def test_multi_tag(): 100 | r = fakeredis.FakeStrictRedis() 101 | r.flushall() 102 | cache = RedisCache(conn=r) 103 | 104 | class A(object): 105 | @cache.cached(tags=["a:{0}", "b:{1}", "c"]) 106 | def add(self, a, b): 107 | return a + b + random.randint(1, 10000) 108 | 109 | add_result1 = A().add(5, 6) 110 | add_result2 = A().add(5, 7) 111 | add_result3 = A().add(1, 8) 112 | add_result4 = A().add(1, 8) 113 | add_result5 = A().add(2, 9) 114 | A().add.invalidate_tag("a:5") 115 | A().add.invalidate_tag("b:8") 116 | assert add_result1 != A().add(5, 6) 117 | assert add_result2 != A().add(5, 7) 118 | assert add_result3 != A().add(1, 8) 119 | assert add_result4 != A().add(1, 8) 120 | assert add_result5 == A().add(2, 9) 121 | A().add.invalidate_tag("c") 122 | assert add_result5 != A().add(2, 9) 123 | 124 | 125 | def test_multi_tag_kwargs(): 126 | r = fakeredis.FakeStrictRedis() 127 | r.flushall() 128 | cache = RedisCache(conn=r) 129 | 130 | class A(object): 131 | @cache.cached(key_func=kwargs_key_generator, tags=["a:{a}", "b:{b}", "c"]) 132 | def add(self, a, b): 133 | return a + b + random.randint(1, 10000) 134 | 135 | add_result1 = A().add(a=5, b=6) 136 | add_result2 = A().add(a=5, b=7) 137 | add_result3 = A().add(a=1, b=8) 138 | add_result4 = A().add(a=1, b=8) 139 | 
add_result5 = A().add(a=2, b=9) 140 | A().add.invalidate_tag("a:5") 141 | A().add.invalidate_tag("b:8") 142 | assert add_result1 != A().add(a=5, b=6) 143 | assert add_result2 != A().add(a=5, b=7) 144 | assert add_result3 != A().add(a=1, b=8) 145 | assert add_result4 != A().add(a=1, b=8) 146 | assert add_result5 == A().add(a=2, b=9) 147 | A().add.invalidate_tag("c") 148 | assert add_result5 != A().add(a=2, b=9) 149 | 150 | 151 | def test_function_tag(): 152 | r = fakeredis.FakeStrictRedis() 153 | r.flushall() 154 | cache = RedisCache(conn=r) 155 | 156 | @cache.cached(tags=[lambda *args, **kwargs: "add:{0}".format(args[0] + args[1])]) 157 | def add(a, b): 158 | return a + b + random.randint(1, 10000) 159 | 160 | add_result1 = add(5, 6) 161 | add_result2 = add(4, 7) 162 | add_result3 = add(5, 8) 163 | 164 | # cache 生效 165 | assert add(5, 6) == add_result1 166 | assert add(4, 7) == add_result2 167 | assert add(5, 8) == add_result3 168 | 169 | add.invalidate_tag("add:11") 170 | assert add(5, 6) != add_result1 171 | assert add(4, 7) != add_result2 172 | assert add(5, 8) == add_result3 173 | 174 | 175 | def test_function_tag_kwargs(): 176 | r = fakeredis.FakeStrictRedis() 177 | r.flushall() 178 | cache = RedisCache(conn=r) 179 | 180 | @cache.cached(key_func=kwargs_key_generator, tags=[lambda *args, **kwargs: "add:{0}".format(kwargs['a'] + kwargs['b'])]) 181 | def add(a, b): 182 | return a + b + random.randint(1, 10000) 183 | 184 | add_result1 = add(a=5, b=6) 185 | add_result2 = add(a=4, b=7) 186 | add_result3 = add(a=5, b=8) 187 | 188 | # cache 生效 189 | assert add(a=5, b=6) == add_result1 190 | assert add(a=4, b=7) == add_result2 191 | assert add(a=5, b=8) == add_result3 192 | 193 | add.invalidate_tag("add:11") 194 | assert add(a=5, b=6) != add_result1 195 | assert add(a=4, b=7) != add_result2 196 | assert add(a=5, b=8) == add_result3 197 | 198 | 199 | def test_global_tag(): 200 | r = fakeredis.FakeStrictRedis() 201 | r.flushall() 202 | cache = RedisCache(conn=r) 203 | 
204 | @cache.cached(tags=["add:{0}"]) 205 | def add(a, b): 206 | return a + b + random.randint(1, 10000) 207 | 208 | @cache.cached(tags=["add:{0}"]) 209 | def add2(a, b): 210 | return a + b + random.randint(1, 10000) 211 | 212 | add_result1 = add(5, 6) 213 | add_result2 = add(5, 7) 214 | add2_result1 = add2(5, 6) 215 | 216 | # cache 生效 217 | assert add(5, 6) == add_result1 218 | assert add(5, 7) == add_result2 219 | assert add2(5, 6) == add2_result1 220 | 221 | # cache 失效,影响 add2 222 | add.invalidate_tag("add:5") 223 | assert add(5, 6) != add_result1 224 | assert add(5, 7) != add_result2 225 | assert add2(5, 6) != add2_result1 226 | 227 | # 使用 global invalidate tag 228 | add_result1 = add(5, 6) 229 | add_result2 = add(5, 7) 230 | add2_result1 = add2(5, 6) 231 | assert add(5, 6) == add_result1 232 | assert add(5, 7) == add_result2 233 | assert add2(5, 6) == add2_result1 234 | cache.invalidate_tag("add:5") 235 | assert add(5, 6) != add_result1 236 | assert add(5, 7) != add_result2 237 | assert add2(5, 6) != add2_result1 238 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27,py36 3 | [testenv] 4 | deps= nose 5 | fakeredis 6 | commands= nosetests {posargs} 7 | --------------------------------------------------------------------------------