├── .gitignore ├── CHANGES ├── LICENSE ├── Makefile ├── README.rst ├── TODO ├── bench └── bench.py ├── docs ├── Makefile ├── _static │ └── rc.png ├── _themes │ ├── LICENSE │ ├── README │ ├── flask │ │ ├── layout.html │ │ ├── relations.html │ │ ├── static │ │ │ └── flasky.css_t │ │ └── theme.conf │ ├── flask_small │ │ ├── layout.html │ │ ├── static │ │ │ └── flasky.css_t │ │ └── theme.conf │ └── flask_theme_support.py ├── api.rst ├── cache.rst ├── cache_cluster_config.rst ├── cache_config.rst ├── changelog.rst ├── conf.py ├── foreword.rst ├── index.rst ├── installation.rst ├── license.rst ├── make.bat ├── patterns │ ├── index.rst │ └── memory_cache.rst ├── quickstart.rst ├── redis_cluster_router.rst ├── serializer.rst ├── testing.rst └── tutorial.rst ├── examples └── tutorial.py ├── logo └── rc.psd ├── rc ├── __init__.py ├── cache.py ├── ketama.py ├── poller.py ├── promise.py ├── redis_clients.py ├── redis_cluster.py ├── redis_router.py ├── serializer.py ├── testing.py └── utils.py ├── setup.py └── tests ├── conftest.py ├── test_cache.py ├── test_ketama.py ├── test_poller.py ├── test_promise.py ├── test_redis_clients.py ├── test_redis_cluster.py ├── test_redis_router.py ├── test_serializer.py └── test_utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | docs/_build 2 | *.pyc 3 | *.pyo 4 | .DS_Store 5 | build 6 | dist 7 | *.egg-info 8 | .cache/ 9 | -------------------------------------------------------------------------------- /CHANGES: -------------------------------------------------------------------------------- 1 | RC Changelog 2 | ============ 3 | 4 | 5 | Version 0.1 6 | ----------- 7 | 8 | First public preview release. 
9 | 10 | 11 | Version 0.1.1 12 | ------------- 13 | 14 | Bugfix release, released on Dec 18th 2015 15 | 16 | - Make the cache decorated function always return result through the 17 | serializer, so we get consistent result 18 | 19 | 20 | Version 0.2 21 | ----------- 22 | 23 | Released on Dec 23th 2015 24 | 25 | - Added include_self parameter for cache decorated function, now we can 26 | cache instance method. 27 | 28 | 29 | Version 0.2.1 30 | ------------- 31 | 32 | Enhancement release, released on Jan 15th 2016 33 | 34 | - Only check has_self once to get rid of the inspect performance issue 35 | 36 | 37 | Version 0.3 38 | ----------- 39 | 40 | Released on May 3th 2016 41 | 42 | - Added bypass_values parameter for cache object, now we can bypass 43 | certain return values of cache decorated function 44 | 45 | 46 | Version 0.3.1 47 | ------------- 48 | 49 | Bugfix release, released on June 28th 2016 50 | 51 | - Fixed ketama get node when key is unicode 52 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016 by Shipeng Feng. 2 | 3 | Some rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are 7 | met: 8 | 9 | * Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above 13 | copyright notice, this list of conditions and the following 14 | disclaimer in the documentation and/or other materials provided 15 | with the distribution. 16 | 17 | * The names of the contributors may not be used to endorse or 18 | promote products derived from this software without specific 19 | prior written permission. 
20 | 21 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 22 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 23 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 24 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 25 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 26 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 27 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 28 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 29 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 30 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 32 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: clean-pyc test 2 | 3 | tox-test: 4 | tox 5 | 6 | clean-pyc: 7 | find . -name '*.pyc' -exec rm -f {} + 8 | find . -name '*.pyo' -exec rm -f {} + 9 | find . -name '*~' -exec rm -f {} + 10 | 11 | lines: 12 | find . -name "*.py"|xargs cat|wc -l 13 | 14 | release: 15 | python setup.py register 16 | python setup.py sdist upload 17 | python setup.py bdist_wheel upload 18 | 19 | test: 20 | @py.test -vv --tb=short 21 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | rc 2 | == 3 | 4 | .. image:: https://github.com/fengsp/rc/blob/master/docs/_static/rc.png?raw=true 5 | :alt: rc: the redis cache 6 | 7 | rc - the redis cache. 8 | 9 | - easy to use 10 | - can build cache cluster 11 | - batch-fetch multiple cache results (do it in parallel for cluster) 12 | 13 | For full documentation see `rc.readthedocs.org `_. 
14 | 15 | 16 | Installation 17 | ------------ 18 | 19 | .. code-block:: bash 20 | 21 | $ pip install rc 22 | 23 | 24 | Quickstart 25 | ---------- 26 | 27 | A minimal cache example looks like this: 28 | 29 | .. code-block:: python 30 | 31 | from rc import Cache 32 | 33 | cache = Cache() 34 | assert cache.set('key', 'value') 35 | assert cache.get('key') == 'value' 36 | assert cache.get('foo') is None 37 | assert cache.set('list', [1]) 38 | assert cache.get('list') == [1] 39 | 40 | A cache cluster use a redis cluster as backend: 41 | 42 | .. code-block:: python 43 | 44 | from rc import CacheCluster 45 | 46 | cache = CacheCluster({ 47 | 'cache01': {'host': 'redis-host01'}, 48 | 'cache02': {'host': 'redis-host02'}, 49 | 'cache03': {'host': 'redis-host03'}, 50 | 'cache04': {'host': 'redis-host04', 'db': 1}, 51 | }) 52 | 53 | Cache decorator: 54 | 55 | .. code-block:: python 56 | 57 | @cache.cache() 58 | def load(name, offset): 59 | return load_from_database(name, offset) 60 | 61 | rv = load('name', offset=10) 62 | 63 | Batch fetch multiple cache results: 64 | 65 | .. code-block:: python 66 | 67 | assert cache.get_many('key', 'foo') == ['value', None] 68 | 69 | # for cache decorated function 70 | @cache.cache() 71 | def cached_func(param): 72 | return param 73 | 74 | results = [] 75 | # with the context manager, the function 76 | # is executed and return a promise 77 | with cache.batch_mode(): 78 | for i in range(10): 79 | results.append(cached_func(i)) 80 | for i, rv in enumerate(results): 81 | assert rv.value == i 82 | 83 | Cache invalidation: 84 | 85 | .. code-block:: python 86 | 87 | cache.delete('key') 88 | # for decorated function 89 | cache.invalidate(load, 'name', offset=10) 90 | 91 | 92 | Better 93 | ------ 94 | 95 | If you feel anything wrong, feedbacks or pull requests are welcome. 
96 | -------------------------------------------------------------------------------- /TODO: -------------------------------------------------------------------------------- 1 | TODO 2 | ==== 3 | 4 | - compressor support 5 | - cache slave support 6 | - key mangler support 7 | -------------------------------------------------------------------------------- /bench/bench.py: -------------------------------------------------------------------------------- 1 | import time 2 | from itertools import izip 3 | 4 | from redis import StrictRedis 5 | from rc.redis_cluster import RedisCluster 6 | 7 | 8 | cluster = RedisCluster({ 9 | 0: {'host': 'redis01.aws.dev', 'port': 4001, 'db': 0}, 10 | 1: {'host': 'redis01.aws.dev', 'port': 4001, 'db': 1}, 11 | 2: {'host': 'redis01.aws.dev', 'port': 4001, 'db': 2}, 12 | 3: {'host': 'redis01.aws.dev', 'port': 4001, 'db': 3}, 13 | 4: {'host': 'redis01.aws.dev', 'port': 4001, 'db': 4}, 14 | 5: {'host': 'redis01.aws.dev', 'port': 4001, 'db': 5}, 15 | 6: {'host': 'redis01.aws.dev', 'port': 4001, 'db': 6}, 16 | }) 17 | cluster_router = cluster.router 18 | cluster_client = cluster.get_client() 19 | clients = [ 20 | StrictRedis(host='redis01.aws.dev', port=4001, db=0), 21 | StrictRedis(host='redis01.aws.dev', port=4001, db=1), 22 | StrictRedis(host='redis01.aws.dev', port=4001, db=2), 23 | StrictRedis(host='redis01.aws.dev', port=4001, db=3), 24 | StrictRedis(host='redis01.aws.dev', port=4001, db=4), 25 | StrictRedis(host='redis01.aws.dev', port=4001, db=5), 26 | StrictRedis(host='redis01.aws.dev', port=4001, db=6), 27 | ] 28 | 29 | 30 | def bench_cluster(keys): 31 | print 'Benching cluster...' 32 | start = time.time() 33 | while keys: 34 | cluster_client.mget(keys[:1000]) 35 | keys = keys[1000:] 36 | print time.time() - start 37 | 38 | 39 | def bench_clients(keys): 40 | print 'Benching clients...' 
41 | start = time.time() 42 | while keys: 43 | current_keys = keys[:1000] 44 | hostname_to_keys = {} 45 | for key in current_keys: 46 | hostname = cluster_router.get_host_for_key(key) 47 | hostname_to_keys.setdefault(hostname, []).append(key) 48 | result = {} 49 | for hostname, hostkeys in hostname_to_keys.iteritems(): 50 | rv = clients[hostname].mget(hostkeys) 51 | result.update(dict(izip(hostkeys, rv))) 52 | [result[k] for k in current_keys] 53 | keys = keys[1000:] 54 | print time.time() - start 55 | 56 | 57 | if __name__ == '__main__': 58 | keys = range(100000) 59 | bench_cluster(keys) 60 | bench_clients(keys) 61 | """Benching cluster... 62 | 2.86095499992 63 | Benching clients... 64 | 10.8946011066 65 | """ 66 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/rc.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/rc.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/rc" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/rc" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 
112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/_static/rc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fengsp/rc/32c4d4e2cb7ba734b2dbd9bd83bcc85a2f09499f/docs/_static/rc.png -------------------------------------------------------------------------------- /docs/_themes/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010 by Armin Ronacher. 2 | 3 | Some rights reserved. 4 | 5 | Redistribution and use in source and binary forms of the theme, with or 6 | without modification, are permitted provided that the following conditions 7 | are met: 8 | 9 | * Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above 13 | copyright notice, this list of conditions and the following 14 | disclaimer in the documentation and/or other materials provided 15 | with the distribution. 16 | 17 | * The names of the contributors may not be used to endorse or 18 | promote products derived from this software without specific 19 | prior written permission. 20 | 21 | We kindly ask you to only use these themes in an unmodified manner just 22 | for Flask and Flask-related products, not for unrelated projects. 
If you 23 | like the visual style and want to use it for your own projects, please 24 | consider making some larger changes to the themes (such as changing 25 | font faces, sizes, colors or margins). 26 | 27 | THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 28 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 29 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 30 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 31 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 32 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 33 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 34 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 35 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 36 | ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE 37 | POSSIBILITY OF SUCH DAMAGE. 38 | -------------------------------------------------------------------------------- /docs/_themes/README: -------------------------------------------------------------------------------- 1 | Flask Sphinx Styles 2 | =================== 3 | 4 | This repository contains sphinx styles for Flask and Flask related 5 | projects. To use this style in your Sphinx documentation, follow 6 | this guide: 7 | 8 | 1. put this folder as _themes into your docs folder. Alternatively 9 | you can also use git submodules to check out the contents there. 10 | 2. add this to your conf.py: 11 | 12 | sys.path.append(os.path.abspath('_themes')) 13 | html_theme_path = ['_themes'] 14 | html_theme = 'flask' 15 | 16 | The following themes exist: 17 | 18 | - 'flask' - the standard flask documentation theme for large 19 | projects 20 | - 'flask_small' - small one-page theme. Intended to be used by 21 | very small addon libraries for flask. 
22 | 23 | The following options exist for the flask_small theme: 24 | 25 | [options] 26 | index_logo = '' filename of a picture in _static 27 | to be used as replacement for the 28 | h1 in the index.rst file. 29 | index_logo_height = 120px height of the index logo 30 | github_fork = '' repository name on github for the 31 | "fork me" badge 32 | -------------------------------------------------------------------------------- /docs/_themes/flask/layout.html: -------------------------------------------------------------------------------- 1 | {%- extends "basic/layout.html" %} 2 | {%- block extrahead %} 3 | {{ super() }} 4 | {% if theme_touch_icon %} 5 | 6 | {% endif %} 7 | 8 | {% endblock %} 9 | {%- block relbar2 %}{% endblock %} 10 | {% block header %} 11 | {{ super() }} 12 | {% if pagename == 'index' %} 13 |
14 | {% endif %} 15 | {% endblock %} 16 | {%- block footer %} 17 | 21 | 22 | Fork me on GitHub 23 | 24 | {% if pagename == 'index' %} 25 |
26 | {% endif %} 27 | {%- endblock %} 28 | -------------------------------------------------------------------------------- /docs/_themes/flask/relations.html: -------------------------------------------------------------------------------- 1 |

Related Topics

2 | 20 | -------------------------------------------------------------------------------- /docs/_themes/flask/static/flasky.css_t: -------------------------------------------------------------------------------- 1 | /* 2 | * flasky.css_t 3 | * ~~~~~~~~~~~~ 4 | * 5 | * :copyright: Copyright 2010 by Armin Ronacher. 6 | * :license: Flask Design License, see LICENSE for details. 7 | */ 8 | 9 | {% set page_width = '940px' %} 10 | {% set sidebar_width = '220px' %} 11 | 12 | @import url("basic.css"); 13 | 14 | /* -- page layout ----------------------------------------------------------- */ 15 | 16 | body { 17 | font-family: 'Georgia', serif; 18 | font-size: 17px; 19 | background-color: white; 20 | color: #000; 21 | margin: 0; 22 | padding: 0; 23 | } 24 | 25 | div.document { 26 | width: {{ page_width }}; 27 | margin: 30px auto 0 auto; 28 | } 29 | 30 | div.documentwrapper { 31 | float: left; 32 | width: 100%; 33 | } 34 | 35 | div.bodywrapper { 36 | margin: 0 0 0 {{ sidebar_width }}; 37 | } 38 | 39 | div.sphinxsidebar { 40 | width: {{ sidebar_width }}; 41 | } 42 | 43 | hr { 44 | border: 1px solid #B1B4B6; 45 | } 46 | 47 | div.body { 48 | background-color: #ffffff; 49 | color: #3E4349; 50 | padding: 0 30px 0 30px; 51 | } 52 | 53 | img.floatingflask { 54 | padding: 0 0 10px 10px; 55 | float: right; 56 | } 57 | 58 | div.footer { 59 | width: {{ page_width }}; 60 | margin: 20px auto 30px auto; 61 | font-size: 14px; 62 | color: #888; 63 | text-align: right; 64 | } 65 | 66 | div.footer a { 67 | color: #888; 68 | } 69 | 70 | div.related { 71 | display: none; 72 | } 73 | 74 | div.sphinxsidebar a { 75 | color: #444; 76 | text-decoration: none; 77 | border-bottom: 1px dotted #999; 78 | } 79 | 80 | div.sphinxsidebar a:hover { 81 | border-bottom: 1px solid #999; 82 | } 83 | 84 | div.sphinxsidebar { 85 | font-size: 14px; 86 | line-height: 1.5; 87 | } 88 | 89 | div.sphinxsidebarwrapper { 90 | padding: 18px 10px; 91 | } 92 | 93 | div.sphinxsidebarwrapper p.logo { 94 | padding: 0 0 20px 0; 
95 | margin: 0; 96 | text-align: center; 97 | } 98 | 99 | div.sphinxsidebar h3, 100 | div.sphinxsidebar h4 { 101 | font-family: 'Garamond', 'Georgia', serif; 102 | color: #444; 103 | font-size: 24px; 104 | font-weight: normal; 105 | margin: 0 0 5px 0; 106 | padding: 0; 107 | } 108 | 109 | div.sphinxsidebar h4 { 110 | font-size: 20px; 111 | } 112 | 113 | div.sphinxsidebar h3 a { 114 | color: #444; 115 | } 116 | 117 | div.sphinxsidebar p.logo a, 118 | div.sphinxsidebar h3 a, 119 | div.sphinxsidebar p.logo a:hover, 120 | div.sphinxsidebar h3 a:hover { 121 | border: none; 122 | } 123 | 124 | div.sphinxsidebar p { 125 | color: #555; 126 | margin: 10px 0; 127 | } 128 | 129 | div.sphinxsidebar ul { 130 | margin: 10px 0; 131 | padding: 0; 132 | color: #000; 133 | } 134 | 135 | div.sphinxsidebar input { 136 | border: 1px solid #ccc; 137 | font-family: 'Georgia', serif; 138 | font-size: 1em; 139 | } 140 | 141 | /* -- body styles ----------------------------------------------------------- */ 142 | 143 | a { 144 | color: #004B6B; 145 | text-decoration: underline; 146 | } 147 | 148 | a:hover { 149 | color: #6D4100; 150 | text-decoration: underline; 151 | } 152 | 153 | div.body h1, 154 | div.body h2, 155 | div.body h3, 156 | div.body h4, 157 | div.body h5, 158 | div.body h6 { 159 | font-family: 'Garamond', 'Georgia', serif; 160 | font-weight: normal; 161 | margin: 30px 0px 10px 0px; 162 | padding: 0; 163 | } 164 | 165 | {% if theme_index_logo %} 166 | div.indexwrapper h1 { 167 | text-indent: -999999px; 168 | background: url({{ theme_index_logo }}) no-repeat center center; 169 | height: {{ theme_index_logo_height }}; 170 | } 171 | {% endif %} 172 | div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } 173 | div.body h2 { font-size: 180%; } 174 | div.body h3 { font-size: 150%; } 175 | div.body h4 { font-size: 130%; } 176 | div.body h5 { font-size: 100%; } 177 | div.body h6 { font-size: 100%; } 178 | 179 | a.headerlink { 180 | color: #ddd; 181 | padding: 0 4px; 182 | 
text-decoration: none; 183 | } 184 | 185 | a.headerlink:hover { 186 | color: #444; 187 | background: #eaeaea; 188 | } 189 | 190 | div.body p, div.body dd, div.body li { 191 | line-height: 1.4em; 192 | } 193 | 194 | div.admonition { 195 | background: #fafafa; 196 | margin: 20px -30px; 197 | padding: 10px 30px; 198 | border-top: 1px solid #ccc; 199 | border-bottom: 1px solid #ccc; 200 | } 201 | 202 | div.admonition tt.xref, div.admonition a tt { 203 | border-bottom: 1px solid #fafafa; 204 | } 205 | 206 | dd div.admonition { 207 | margin-left: -60px; 208 | padding-left: 60px; 209 | } 210 | 211 | div.admonition p.admonition-title { 212 | font-family: 'Garamond', 'Georgia', serif; 213 | font-weight: normal; 214 | font-size: 24px; 215 | margin: 0 0 10px 0; 216 | padding: 0; 217 | line-height: 1; 218 | } 219 | 220 | div.admonition p.last { 221 | margin-bottom: 0; 222 | } 223 | 224 | div.highlight { 225 | background-color: white; 226 | } 227 | 228 | dt:target, .highlight { 229 | background: #FAF3E8; 230 | } 231 | 232 | div.note { 233 | background-color: #eee; 234 | border: 1px solid #ccc; 235 | } 236 | 237 | div.seealso { 238 | background-color: #ffc; 239 | border: 1px solid #ff6; 240 | } 241 | 242 | div.topic { 243 | background-color: #eee; 244 | } 245 | 246 | p.admonition-title { 247 | display: inline; 248 | } 249 | 250 | p.admonition-title:after { 251 | content: ":"; 252 | } 253 | 254 | pre, tt { 255 | font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; 256 | font-size: 0.9em; 257 | } 258 | 259 | img.screenshot { 260 | } 261 | 262 | tt.descname, tt.descclassname { 263 | font-size: 0.95em; 264 | } 265 | 266 | tt.descname { 267 | padding-right: 0.08em; 268 | } 269 | 270 | img.screenshot { 271 | -moz-box-shadow: 2px 2px 4px #eee; 272 | -webkit-box-shadow: 2px 2px 4px #eee; 273 | box-shadow: 2px 2px 4px #eee; 274 | } 275 | 276 | table.docutils { 277 | border: 1px solid #888; 278 | -moz-box-shadow: 2px 2px 4px #eee; 279 | 
-webkit-box-shadow: 2px 2px 4px #eee; 280 | box-shadow: 2px 2px 4px #eee; 281 | } 282 | 283 | table.docutils td, table.docutils th { 284 | border: 1px solid #888; 285 | padding: 0.25em 0.7em; 286 | } 287 | 288 | table.field-list, table.footnote { 289 | border: none; 290 | -moz-box-shadow: none; 291 | -webkit-box-shadow: none; 292 | box-shadow: none; 293 | } 294 | 295 | table.footnote { 296 | margin: 15px 0; 297 | width: 100%; 298 | border: 1px solid #eee; 299 | background: #fdfdfd; 300 | font-size: 0.9em; 301 | } 302 | 303 | table.footnote + table.footnote { 304 | margin-top: -15px; 305 | border-top: none; 306 | } 307 | 308 | table.field-list th { 309 | padding: 0 0.8em 0 0; 310 | } 311 | 312 | table.field-list td { 313 | padding: 0; 314 | } 315 | 316 | table.footnote td.label { 317 | width: 0px; 318 | padding: 0.3em 0 0.3em 0.5em; 319 | } 320 | 321 | table.footnote td { 322 | padding: 0.3em 0.5em; 323 | } 324 | 325 | dl { 326 | margin: 0; 327 | padding: 0; 328 | } 329 | 330 | dl dd { 331 | margin-left: 30px; 332 | } 333 | 334 | blockquote { 335 | margin: 0 0 0 30px; 336 | padding: 0; 337 | } 338 | 339 | ul, ol { 340 | margin: 10px 0 10px 30px; 341 | padding: 0; 342 | } 343 | 344 | pre { 345 | background: #eee; 346 | padding: 7px 30px; 347 | margin: 15px -30px; 348 | line-height: 1.3em; 349 | } 350 | 351 | dl pre, blockquote pre, li pre { 352 | margin-left: -60px; 353 | padding-left: 60px; 354 | } 355 | 356 | dl dl pre { 357 | margin-left: -90px; 358 | padding-left: 90px; 359 | } 360 | 361 | tt { 362 | background-color: #ecf0f3; 363 | color: #222; 364 | /* padding: 1px 2px; */ 365 | } 366 | 367 | tt.xref, a tt { 368 | background-color: #FBFBFB; 369 | border-bottom: 1px solid white; 370 | } 371 | 372 | a.reference { 373 | text-decoration: none; 374 | border-bottom: 1px dotted #004B6B; 375 | } 376 | 377 | a.reference:hover { 378 | border-bottom: 1px solid #6D4100; 379 | } 380 | 381 | a.footnote-reference { 382 | text-decoration: none; 383 | font-size: 0.7em; 384 | 
vertical-align: top; 385 | border-bottom: 1px dotted #004B6B; 386 | } 387 | 388 | a.footnote-reference:hover { 389 | border-bottom: 1px solid #6D4100; 390 | } 391 | 392 | a:hover tt { 393 | background: #EEE; 394 | } 395 | 396 | 397 | @media screen and (max-width: 870px) { 398 | 399 | div.sphinxsidebar { 400 | display: none; 401 | } 402 | 403 | div.document { 404 | width: 100%; 405 | 406 | } 407 | 408 | div.documentwrapper { 409 | margin-left: 0; 410 | margin-top: 0; 411 | margin-right: 0; 412 | margin-bottom: 0; 413 | } 414 | 415 | div.bodywrapper { 416 | margin-top: 0; 417 | margin-right: 0; 418 | margin-bottom: 0; 419 | margin-left: 0; 420 | } 421 | 422 | ul { 423 | margin-left: 0; 424 | } 425 | 426 | .document { 427 | width: auto; 428 | } 429 | 430 | .footer { 431 | width: auto; 432 | } 433 | 434 | .bodywrapper { 435 | margin: 0; 436 | } 437 | 438 | .footer { 439 | width: auto; 440 | } 441 | 442 | .github { 443 | display: none; 444 | } 445 | 446 | 447 | 448 | } 449 | 450 | 451 | 452 | @media screen and (max-width: 875px) { 453 | 454 | body { 455 | margin: 0; 456 | padding: 20px 30px; 457 | } 458 | 459 | div.documentwrapper { 460 | float: none; 461 | background: white; 462 | } 463 | 464 | div.sphinxsidebar { 465 | display: block; 466 | float: none; 467 | width: 102.5%; 468 | margin: 50px -30px -20px -30px; 469 | padding: 10px 20px; 470 | background: #333; 471 | color: white; 472 | } 473 | 474 | div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, 475 | div.sphinxsidebar h3 a { 476 | color: white; 477 | } 478 | 479 | div.sphinxsidebar a { 480 | color: #aaa; 481 | } 482 | 483 | div.sphinxsidebar p.logo { 484 | display: none; 485 | } 486 | 487 | div.document { 488 | width: 100%; 489 | margin: 0; 490 | } 491 | 492 | div.related { 493 | display: block; 494 | margin: 0; 495 | padding: 10px 0 20px 0; 496 | } 497 | 498 | div.related ul, 499 | div.related ul li { 500 | margin: 0; 501 | padding: 0; 502 | } 503 | 504 | div.footer { 505 | display: none; 506 | } 
507 | 508 | div.bodywrapper { 509 | margin: 0; 510 | } 511 | 512 | div.body { 513 | min-height: 0; 514 | padding: 0; 515 | } 516 | 517 | .rtd_doc_footer { 518 | display: none; 519 | } 520 | 521 | .document { 522 | width: auto; 523 | } 524 | 525 | .footer { 526 | width: auto; 527 | } 528 | 529 | .footer { 530 | width: auto; 531 | } 532 | 533 | .github { 534 | display: none; 535 | } 536 | } 537 | 538 | 539 | /* scrollbars */ 540 | 541 | ::-webkit-scrollbar { 542 | width: 6px; 543 | height: 6px; 544 | } 545 | 546 | ::-webkit-scrollbar-button:start:decrement, 547 | ::-webkit-scrollbar-button:end:increment { 548 | display: block; 549 | height: 10px; 550 | } 551 | 552 | ::-webkit-scrollbar-button:vertical:increment { 553 | background-color: #fff; 554 | } 555 | 556 | ::-webkit-scrollbar-track-piece { 557 | background-color: #eee; 558 | -webkit-border-radius: 3px; 559 | } 560 | 561 | ::-webkit-scrollbar-thumb:vertical { 562 | height: 50px; 563 | background-color: #ccc; 564 | -webkit-border-radius: 3px; 565 | } 566 | 567 | ::-webkit-scrollbar-thumb:horizontal { 568 | width: 50px; 569 | background-color: #ccc; 570 | -webkit-border-radius: 3px; 571 | } 572 | 573 | /* misc. */ 574 | 575 | .revsys-inline { 576 | display: none!important; 577 | } -------------------------------------------------------------------------------- /docs/_themes/flask/theme.conf: -------------------------------------------------------------------------------- 1 | [theme] 2 | inherit = basic 3 | stylesheet = flasky.css 4 | pygments_style = flask_theme_support.FlaskyStyle 5 | 6 | [options] 7 | index_logo = '' 8 | index_logo_height = 120px 9 | touch_icon = 10 | -------------------------------------------------------------------------------- /docs/_themes/flask_small/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "basic/layout.html" %} 2 | {% block header %} 3 | {{ super() }} 4 | {% if pagename == 'index' %} 5 |
6 | {% endif %} 7 | {% endblock %} 8 | {% block footer %} 9 | {% if pagename == 'index' %} 10 |
11 | {% endif %} 12 | {% endblock %} 13 | {# do not display relbars #} 14 | {% block relbar1 %}{% endblock %} 15 | {% block relbar2 %} 16 | {% if theme_github_fork %} 17 | Fork me on GitHub 19 | {% endif %} 20 | {% endblock %} 21 | {% block sidebar1 %}{% endblock %} 22 | {% block sidebar2 %}{% endblock %} 23 | -------------------------------------------------------------------------------- /docs/_themes/flask_small/static/flasky.css_t: -------------------------------------------------------------------------------- 1 | /* 2 | * flasky.css_t 3 | * ~~~~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- flasky theme based on nature theme. 6 | * 7 | * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | @import url("basic.css"); 13 | 14 | /* -- page layout ----------------------------------------------------------- */ 15 | 16 | body { 17 | font-family: 'Georgia', serif; 18 | font-size: 17px; 19 | color: #000; 20 | background: white; 21 | margin: 0; 22 | padding: 0; 23 | } 24 | 25 | div.documentwrapper { 26 | float: left; 27 | width: 100%; 28 | } 29 | 30 | div.bodywrapper { 31 | margin: 40px auto 0 auto; 32 | width: 700px; 33 | } 34 | 35 | hr { 36 | border: 1px solid #B1B4B6; 37 | } 38 | 39 | div.body { 40 | background-color: #ffffff; 41 | color: #3E4349; 42 | padding: 0 30px 30px 30px; 43 | } 44 | 45 | img.floatingflask { 46 | padding: 0 0 10px 10px; 47 | float: right; 48 | } 49 | 50 | div.footer { 51 | text-align: right; 52 | color: #888; 53 | padding: 10px; 54 | font-size: 14px; 55 | width: 650px; 56 | margin: 0 auto 40px auto; 57 | } 58 | 59 | div.footer a { 60 | color: #888; 61 | text-decoration: underline; 62 | } 63 | 64 | div.related { 65 | line-height: 32px; 66 | color: #888; 67 | } 68 | 69 | div.related ul { 70 | padding: 0 0 0 10px; 71 | } 72 | 73 | div.related a { 74 | color: #444; 75 | } 76 | 77 | /* -- body styles ----------------------------------------------------------- */ 78 | 79 | 
a { 80 | color: #004B6B; 81 | text-decoration: underline; 82 | } 83 | 84 | a:hover { 85 | color: #6D4100; 86 | text-decoration: underline; 87 | } 88 | 89 | div.body { 90 | padding-bottom: 40px; /* saved for footer */ 91 | } 92 | 93 | div.body h1, 94 | div.body h2, 95 | div.body h3, 96 | div.body h4, 97 | div.body h5, 98 | div.body h6 { 99 | font-family: 'Garamond', 'Georgia', serif; 100 | font-weight: normal; 101 | margin: 30px 0px 10px 0px; 102 | padding: 0; 103 | } 104 | 105 | {% if theme_index_logo %} 106 | div.indexwrapper h1 { 107 | text-indent: -999999px; 108 | background: url({{ theme_index_logo }}) no-repeat center center; 109 | height: {{ theme_index_logo_height }}; 110 | } 111 | {% endif %} 112 | 113 | div.body h2 { font-size: 180%; } 114 | div.body h3 { font-size: 150%; } 115 | div.body h4 { font-size: 130%; } 116 | div.body h5 { font-size: 100%; } 117 | div.body h6 { font-size: 100%; } 118 | 119 | a.headerlink { 120 | color: white; 121 | padding: 0 4px; 122 | text-decoration: none; 123 | } 124 | 125 | a.headerlink:hover { 126 | color: #444; 127 | background: #eaeaea; 128 | } 129 | 130 | div.body p, div.body dd, div.body li { 131 | line-height: 1.4em; 132 | } 133 | 134 | div.admonition { 135 | background: #fafafa; 136 | margin: 20px -30px; 137 | padding: 10px 30px; 138 | border-top: 1px solid #ccc; 139 | border-bottom: 1px solid #ccc; 140 | } 141 | 142 | div.admonition p.admonition-title { 143 | font-family: 'Garamond', 'Georgia', serif; 144 | font-weight: normal; 145 | font-size: 24px; 146 | margin: 0 0 10px 0; 147 | padding: 0; 148 | line-height: 1; 149 | } 150 | 151 | div.admonition p.last { 152 | margin-bottom: 0; 153 | } 154 | 155 | div.highlight{ 156 | background-color: white; 157 | } 158 | 159 | dt:target, .highlight { 160 | background: #FAF3E8; 161 | } 162 | 163 | div.note { 164 | background-color: #eee; 165 | border: 1px solid #ccc; 166 | } 167 | 168 | div.seealso { 169 | background-color: #ffc; 170 | border: 1px solid #ff6; 171 | } 172 | 173 | 
div.topic { 174 | background-color: #eee; 175 | } 176 | 177 | div.warning { 178 | background-color: #ffe4e4; 179 | border: 1px solid #f66; 180 | } 181 | 182 | p.admonition-title { 183 | display: inline; 184 | } 185 | 186 | p.admonition-title:after { 187 | content: ":"; 188 | } 189 | 190 | pre, tt { 191 | font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; 192 | font-size: 0.85em; 193 | } 194 | 195 | img.screenshot { 196 | } 197 | 198 | tt.descname, tt.descclassname { 199 | font-size: 0.95em; 200 | } 201 | 202 | tt.descname { 203 | padding-right: 0.08em; 204 | } 205 | 206 | img.screenshot { 207 | -moz-box-shadow: 2px 2px 4px #eee; 208 | -webkit-box-shadow: 2px 2px 4px #eee; 209 | box-shadow: 2px 2px 4px #eee; 210 | } 211 | 212 | table.docutils { 213 | border: 1px solid #888; 214 | -moz-box-shadow: 2px 2px 4px #eee; 215 | -webkit-box-shadow: 2px 2px 4px #eee; 216 | box-shadow: 2px 2px 4px #eee; 217 | } 218 | 219 | table.docutils td, table.docutils th { 220 | border: 1px solid #888; 221 | padding: 0.25em 0.7em; 222 | } 223 | 224 | table.field-list, table.footnote { 225 | border: none; 226 | -moz-box-shadow: none; 227 | -webkit-box-shadow: none; 228 | box-shadow: none; 229 | } 230 | 231 | table.footnote { 232 | margin: 15px 0; 233 | width: 100%; 234 | border: 1px solid #eee; 235 | } 236 | 237 | table.field-list th { 238 | padding: 0 0.8em 0 0; 239 | } 240 | 241 | table.field-list td { 242 | padding: 0; 243 | } 244 | 245 | table.footnote td { 246 | padding: 0.5em; 247 | } 248 | 249 | dl { 250 | margin: 0; 251 | padding: 0; 252 | } 253 | 254 | dl dd { 255 | margin-left: 30px; 256 | } 257 | 258 | pre { 259 | padding: 0; 260 | margin: 15px -30px; 261 | padding: 8px; 262 | line-height: 1.3em; 263 | padding: 7px 30px; 264 | background: #eee; 265 | border-radius: 2px; 266 | -moz-border-radius: 2px; 267 | -webkit-border-radius: 2px; 268 | } 269 | 270 | dl pre { 271 | margin-left: -60px; 272 | padding-left: 60px; 273 | } 274 | 275 | tt 
{ 276 | background-color: #ecf0f3; 277 | color: #222; 278 | /* padding: 1px 2px; */ 279 | } 280 | 281 | tt.xref, a tt { 282 | background-color: #FBFBFB; 283 | } 284 | 285 | a:hover tt { 286 | background: #EEE; 287 | } 288 | -------------------------------------------------------------------------------- /docs/_themes/flask_small/theme.conf: -------------------------------------------------------------------------------- 1 | [theme] 2 | inherit = basic 3 | stylesheet = flasky.css 4 | nosidebar = true 5 | pygments_style = flask_theme_support.FlaskyStyle 6 | 7 | [options] 8 | index_logo = '' 9 | index_logo_height = 120px 10 | github_fork = '' 11 | -------------------------------------------------------------------------------- /docs/_themes/flask_theme_support.py: -------------------------------------------------------------------------------- 1 | # flasky extensions. flasky pygments style based on tango style 2 | from pygments.style import Style 3 | from pygments.token import Keyword, Name, Comment, String, Error, \ 4 | Number, Operator, Generic, Whitespace, Punctuation, Other, Literal 5 | 6 | 7 | class FlaskyStyle(Style): 8 | background_color = "#f8f8f8" 9 | default_style = "" 10 | 11 | styles = { 12 | # No corresponding class for the following: 13 | #Text: "", # class: '' 14 | Whitespace: "underline #f8f8f8", # class: 'w' 15 | Error: "#a40000 border:#ef2929", # class: 'err' 16 | Other: "#000000", # class 'x' 17 | 18 | Comment: "italic #8f5902", # class: 'c' 19 | Comment.Preproc: "noitalic", # class: 'cp' 20 | 21 | Keyword: "bold #004461", # class: 'k' 22 | Keyword.Constant: "bold #004461", # class: 'kc' 23 | Keyword.Declaration: "bold #004461", # class: 'kd' 24 | Keyword.Namespace: "bold #004461", # class: 'kn' 25 | Keyword.Pseudo: "bold #004461", # class: 'kp' 26 | Keyword.Reserved: "bold #004461", # class: 'kr' 27 | Keyword.Type: "bold #004461", # class: 'kt' 28 | 29 | Operator: "#582800", # class: 'o' 30 | Operator.Word: "bold #004461", # class: 'ow' - like 
keywords 31 | 32 | Punctuation: "bold #000000", # class: 'p' 33 | 34 | # because special names such as Name.Class, Name.Function, etc. 35 | # are not recognized as such later in the parsing, we choose them 36 | # to look the same as ordinary variables. 37 | Name: "#000000", # class: 'n' 38 | Name.Attribute: "#c4a000", # class: 'na' - to be revised 39 | Name.Builtin: "#004461", # class: 'nb' 40 | Name.Builtin.Pseudo: "#3465a4", # class: 'bp' 41 | Name.Class: "#000000", # class: 'nc' - to be revised 42 | Name.Constant: "#000000", # class: 'no' - to be revised 43 | Name.Decorator: "#888", # class: 'nd' - to be revised 44 | Name.Entity: "#ce5c00", # class: 'ni' 45 | Name.Exception: "bold #cc0000", # class: 'ne' 46 | Name.Function: "#000000", # class: 'nf' 47 | Name.Property: "#000000", # class: 'py' 48 | Name.Label: "#f57900", # class: 'nl' 49 | Name.Namespace: "#000000", # class: 'nn' - to be revised 50 | Name.Other: "#000000", # class: 'nx' 51 | Name.Tag: "bold #004461", # class: 'nt' - like a keyword 52 | Name.Variable: "#000000", # class: 'nv' - to be revised 53 | Name.Variable.Class: "#000000", # class: 'vc' - to be revised 54 | Name.Variable.Global: "#000000", # class: 'vg' - to be revised 55 | Name.Variable.Instance: "#000000", # class: 'vi' - to be revised 56 | 57 | Number: "#990000", # class: 'm' 58 | 59 | Literal: "#000000", # class: 'l' 60 | Literal.Date: "#000000", # class: 'ld' 61 | 62 | String: "#4e9a06", # class: 's' 63 | String.Backtick: "#4e9a06", # class: 'sb' 64 | String.Char: "#4e9a06", # class: 'sc' 65 | String.Doc: "italic #8f5902", # class: 'sd' - like a comment 66 | String.Double: "#4e9a06", # class: 's2' 67 | String.Escape: "#4e9a06", # class: 'se' 68 | String.Heredoc: "#4e9a06", # class: 'sh' 69 | String.Interpol: "#4e9a06", # class: 'si' 70 | String.Other: "#4e9a06", # class: 'sx' 71 | String.Regex: "#4e9a06", # class: 'sr' 72 | String.Single: "#4e9a06", # class: 's1' 73 | String.Symbol: "#4e9a06", # class: 'ss' 74 | 75 | Generic: "#000000", 
# class: 'g' 76 | Generic.Deleted: "#a40000", # class: 'gd' 77 | Generic.Emph: "italic #000000", # class: 'ge' 78 | Generic.Error: "#ef2929", # class: 'gr' 79 | Generic.Heading: "bold #000080", # class: 'gh' 80 | Generic.Inserted: "#00A000", # class: 'gi' 81 | Generic.Output: "#888", # class: 'go' 82 | Generic.Prompt: "#745334", # class: 'gp' 83 | Generic.Strong: "bold #000000", # class: 'gs' 84 | Generic.Subheading: "bold #800080", # class: 'gu' 85 | Generic.Traceback: "bold #a40000", # class: 'gt' 86 | } 87 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | .. _api: 2 | 3 | API 4 | === 5 | 6 | .. module:: rc 7 | 8 | This page covers all interfaces of RC. 9 | 10 | 11 | Base Cache System API 12 | --------------------- 13 | 14 | .. currentmodule:: rc.cache 15 | 16 | .. autoclass:: BaseCache 17 | :members: 18 | 19 | 20 | Cache Object 21 | ------------ 22 | 23 | .. currentmodule:: rc 24 | 25 | .. autoclass:: Cache 26 | :members: 27 | 28 | 29 | Cache Cluster Object 30 | -------------------- 31 | 32 | .. autoclass:: CacheCluster 33 | :members: 34 | 35 | 36 | Serializer 37 | ---------- 38 | 39 | .. autoclass:: BaseSerializer 40 | :members: 41 | :inherited-members: 42 | 43 | .. autoclass:: JSONSerializer 44 | :members: 45 | :inherited-members: 46 | 47 | .. autoclass:: PickleSerializer 48 | :members: 49 | :inherited-members: 50 | 51 | 52 | Redis Router 53 | ------------ 54 | 55 | The base router class provides a simple way to replace the router cls that 56 | cache cluster is using. 57 | 58 | .. autoclass:: BaseRedisRouter 59 | :members: 60 | :inherited-members: 61 | 62 | .. autoclass:: RedisCRC32HashRouter 63 | :members: 64 | :inherited-members: 65 | 66 | .. autoclass:: RedisConsistentHashRouter 67 | :members: 68 | :inherited-members: 69 | 70 | 71 | Testing Objects 72 | --------------- 73 | 74 | .. 
autoclass:: NullCache 75 | :members: 76 | 77 | .. autoclass:: FakeRedisCache 78 | :members: 79 | 80 | 81 | Cluster Host Config 82 | ------------------- 83 | 84 | .. currentmodule:: rc.redis_cluster 85 | 86 | .. autoclass:: HostConfig 87 | :members: 88 | 89 | 90 | Promise Object 91 | -------------- 92 | 93 | .. currentmodule:: rc.promise 94 | 95 | .. autoclass:: Promise 96 | :members: 97 | -------------------------------------------------------------------------------- /docs/cache.rst: -------------------------------------------------------------------------------- 1 | .. _cache: 2 | 3 | Cache 4 | ===== 5 | 6 | This page gives you some details on caching. 7 | 8 | 9 | Create Cache Object 10 | ------------------- 11 | 12 | Keep one cache instance around so we can do caching easily. There are two 13 | types of cache in RC, if you are building a small project and one redis server 14 | is enough to hold your cache, go with :class:`~rc.Cache`, if you are working 15 | on a website that is accessed millions of times per day, 16 | :class:`~rc.CacheCluster` is the ideal solution. 17 | 18 | 19 | Cache Global Namespace 20 | ---------------------- 21 | 22 | Namespace is a global thing with one cache object. All cache objects can have 23 | a namespace, by default, it is not set. The idea is simple, namespace is just 24 | one prefix that will be added to all keys set through this cache object. 25 | You can use this to distinguish usage A from usage B if you are sharing 26 | redis server resources on them. There is a parameter that is used to set 27 | this up, a simple demo:: 28 | 29 | from rc import Cache 30 | 31 | models_cache = Cache(namespace='models') 32 | templates_cache = Cache(namespace='templates') 33 | 34 | 35 | Cache Function Result 36 | --------------------- 37 | 38 | There is one useful decorator api used to cache result for a function, 39 | check out :meth:`~rc.cache.BaseCache.cache`. 
Here is a simple example:: 40 | 41 | from rc import Cache 42 | 43 | cache = Cache() 44 | 45 | @cache.cache() 46 | def load(name, offset): 47 | return load_from_database(name, offset) 48 | 49 | rv = load('name', 10) 50 | 51 | If you have two functions with the same name inside one module, use `key_prefix` 52 | to distinguish them:: 53 | 54 | class Data(object): 55 | @cache.cache(key_prefix='another') 56 | def load(self, name, offset): 57 | return load_from_another_place(name, offset) 58 | 59 | 60 | Cache Expiration Time 61 | --------------------- 62 | 63 | The cache expires automatically in time seconds, there is one `default_expire` 64 | that is used for all set on a cache object:: 65 | 66 | cache = Cache(default_expire=24 * 3600) # one day 67 | 68 | Of course you can change it on every cache set:: 69 | 70 | cache.set('key', 'value', expire=3600) # one hour 71 | 72 | 73 | Cache Invalidation 74 | ------------------ 75 | 76 | For a cache key that is set manually by you, simply delete it:: 77 | 78 | cache.delete('key') 79 | 80 | In order to invalidate a cached function with certain arguments:: 81 | 82 | @cache.cache() 83 | def load(name, offset): 84 | return load_from_database(name, offset) 85 | 86 | rv = load('name', offset=10) 87 | 88 | # always invalidate using the same positional and keyword parameters 89 | # as you call the function 90 | cache.invalidate(load, 'name', offset=10) 91 | 92 | What if you want to expire all results of this function with any parameters, 93 | since we have a `key_prefix`, just change it to a different value, like from 94 | ``'version01'`` to ``'version02'``, the data of old version wouldn't be 95 | deleted immediately, however, they are going to be pushed out after 96 | expiration time. 
97 | 98 | 99 | Cache Batch Fetching 100 | -------------------- 101 | 102 | For a simple key usage, you just need :meth:`~rc.cache.BaseCache.get_many`, 103 | here is one simple example:: 104 | 105 | assert cache.get_many('key', 'foo') == ['value', None] 106 | 107 | For cache decorated function, you need :meth:`~rc.cache.BaseCache.batch_mode`, 108 | check the api for more details. Basically we record all functions you want 109 | to execute and return a :class:`~rc.promise.Promise` object. When you leave 110 | the batch context manager, the promise is resolved and the result value is 111 | there for you. 112 | 113 | 114 | Bypass Values 115 | ------------- 116 | 117 | .. versionadded:: 0.3 118 | 119 | When you are using the cache decorator, sometimes you don't want to cache 120 | certain return values of decorated functions, you can bypass them:: 121 | 122 | cache = Cache(bypass_values=[None]) 123 | 124 | @cache.cache() 125 | def load(): 126 | # this won't be cached 127 | return None 128 | -------------------------------------------------------------------------------- /docs/cache_cluster_config.rst: -------------------------------------------------------------------------------- 1 | .. _cache_cluster_config: 2 | 3 | Cache Cluster Config 4 | ==================== 5 | 6 | This page gives you introductions on how to create a :class:`~rc.CacheCluster` 7 | instance. 8 | 9 | 10 | Basic Config 11 | ------------ 12 | 13 | Simple demo:: 14 | 15 | from rc import CacheCluster 16 | 17 | cache = CacheCluster({ 18 | 0: {'host': 'redis-host-01'}, 19 | 1: {'host': 'redis-host-02', 'port': 6479}, 20 | 2: {'unix_socket_path': '/tmp/redis.sock'}, 21 | 3: {'host': 'redis-host-03', 'db': 1}, 22 | }) 23 | 24 | Basically `hosts` is just one dictionary that maps host name to parameters 25 | which are taken by :class:`~rc.redis_cluster.HostConfig`, excluding 26 | `host_name`. 
27 | 28 | Just like `Cache`, you can specify your own customized `serializer_cls`, 29 | you can change default expiration time to any length you want, you can set 30 | a namespace for this cache instance, for more details, check out 31 | :class:`~rc.CacheCluster`. 32 | 33 | 34 | Redis Connection Pool Config 35 | ---------------------------- 36 | 37 | By default we use :class:`~redis.ConnectionPool`, specify your own connection 38 | pool class and options using parameters called ``pool_cls`` and 39 | ``pool_options``. 40 | 41 | 42 | Redis Router Config 43 | ------------------- 44 | 45 | By default we use :class:`~rc.RedisCRC32HashRouter`, specify your own router 46 | class and options using parameters called ``router_cls`` and 47 | ``router_options``. For more routers, check out :ref:`redis_cluster_router`. 48 | 49 | 50 | Concurrency Config 51 | ------------------ 52 | 53 | For operations on multiple keys like `get_many`, `set_many` and `delete_many`, 54 | we execute them in parallel. Under the hood, we do the parallel query using 55 | a select loop (select/poll/kqueue/epoll). Mostly, if we are using several 56 | remote redis servers, we achieve higher performance. You can specify 57 | ``max_concurrency`` and ``poller_timeout`` to control maximum concurrency and 58 | timeout for poller. 59 | -------------------------------------------------------------------------------- /docs/cache_config.rst: -------------------------------------------------------------------------------- 1 | .. _cache_config: 2 | 3 | Cache Config 4 | ============ 5 | 6 | This page gives you introductions on creating a :class:`~rc.Cache` instance. 7 | 8 | 9 | Basic Config 10 | ------------ 11 | 12 | Cache takes parameters for basic redis server setup and cache setup. 
Here is 13 | one simple demo:: 14 | 15 | from rc import Cache 16 | 17 | cache = Cache('redishost01', 6379, db=0, password='pass', 18 | socket_timeout=5) 19 | 20 | There are other parameters you can config, you can specify your own customized 21 | serializer_cls, you can change default expiration time to any length you want, 22 | you can set a namespace for this cache instance, for more details, check out 23 | :class:`~rc.Cache`. 24 | 25 | 26 | Redis Options 27 | ------------- 28 | 29 | There is one parameter called ``redis_options``, you can use this to set 30 | other parameters to the underlying :class:`redis.StrictRedis`. Here is a 31 | simple example:: 32 | 33 | from rc import Cache 34 | 35 | cache = Cache(redis_options={'unix_socket_path': '/tmp/redis.sock'}) 36 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changes: 2 | 3 | .. include:: ../CHANGES 4 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # rc documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Nov 11 23:47:41 2015. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
21 | sys.path.insert(0, os.path.abspath('_themes')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'sphinx.ext.autodoc', 33 | 'sphinx.ext.intersphinx', 34 | ] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | templates_path = ['_templates'] 38 | 39 | # The suffix of source filenames. 40 | source_suffix = '.rst' 41 | 42 | # The encoding of source files. 43 | #source_encoding = 'utf-8-sig' 44 | 45 | # The master toctree document. 46 | master_doc = 'index' 47 | 48 | # General information about the project. 49 | project = u'rc' 50 | copyright = u'2016, Shipeng Feng' 51 | 52 | # The version info for the project you're documenting, acts as replacement for 53 | # |version| and |release|, also used in various other places throughout the 54 | # built documents. 55 | # 56 | import pkg_resources 57 | try: 58 | release = pkg_resources.get_distribution('rc').version 59 | except pkg_resources.DistributionNotFound: 60 | print('To build the documentation, the distribution information of rc') 61 | print('Has to be available. Run "setup.py develop" to setup the') 62 | print('metadata. A virtualenv is recommended!') 63 | sys.exit(1) 64 | del pkg_resources 65 | 66 | version = '.'.join(release.split('.')[:2]) 67 | 68 | # The language for content autogenerated by Sphinx. Refer to documentation 69 | # for a list of supported languages. 70 | #language = None 71 | 72 | # There are two options for replacing |today|: either, you set today to some 73 | # non-false value, then it is used: 74 | #today = '' 75 | # Else, today_fmt is used as the format for a strftime call. 
76 | #today_fmt = '%B %d, %Y' 77 | 78 | # List of patterns, relative to source directory, that match files and 79 | # directories to ignore when looking for source files. 80 | exclude_patterns = ['_build'] 81 | 82 | # The reST default role (used for this markup: `text`) to use for all 83 | # documents. 84 | #default_role = None 85 | 86 | # If true, '()' will be appended to :func: etc. cross-reference text. 87 | #add_function_parentheses = True 88 | 89 | # If true, the current module name will be prepended to all description 90 | # unit titles (such as .. function::). 91 | #add_module_names = True 92 | 93 | # If true, sectionauthor and moduleauthor directives will be shown in the 94 | # output. They are ignored by default. 95 | #show_authors = False 96 | 97 | # The name of the Pygments (syntax highlighting) style to use. 98 | #pygments_style = 'sphinx' 99 | 100 | # A list of ignored prefixes for module index sorting. 101 | #modindex_common_prefix = [] 102 | 103 | # If true, keep warnings as "system message" paragraphs in the built documents. 104 | #keep_warnings = False 105 | 106 | 107 | # -- Options for HTML output ---------------------------------------------- 108 | 109 | # The theme to use for HTML and HTML Help pages. See the documentation for 110 | # a list of builtin themes. 111 | html_theme = 'flask' 112 | 113 | # Theme options are theme-specific and customize the look and feel of a theme 114 | # further. For a list of options available for each theme, see the 115 | # documentation. 116 | html_theme_options = { 117 | 'index_logo': '' 118 | } 119 | 120 | # Add any paths that contain custom themes here, relative to this directory. 121 | html_theme_path = ['_themes'] 122 | 123 | # The name for this set of Sphinx documents. If None, it defaults to 124 | # " v documentation". 125 | #html_title = None 126 | 127 | # A shorter title for the navigation bar. Default is the same as html_title. 
128 | #html_short_title = None 129 | 130 | # The name of an image file (relative to this directory) to place at the top 131 | # of the sidebar. 132 | #html_logo = None 133 | 134 | # The name of an image file (within the static path) to use as favicon of the 135 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 136 | # pixels large. 137 | #html_favicon = None 138 | 139 | # Add any paths that contain custom static files (such as style sheets) here, 140 | # relative to this directory. They are copied after the builtin static files, 141 | # so a file named "default.css" will overwrite the builtin "default.css". 142 | html_static_path = ['_static'] 143 | 144 | # Add any extra paths that contain custom files (such as robots.txt or 145 | # .htaccess) here, relative to this directory. These files are copied 146 | # directly to the root of the documentation. 147 | #html_extra_path = [] 148 | 149 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 150 | # using the given strftime format. 151 | #html_last_updated_fmt = '%b %d, %Y' 152 | 153 | # If true, SmartyPants will be used to convert quotes and dashes to 154 | # typographically correct entities. 155 | #html_use_smartypants = True 156 | 157 | # Custom sidebar templates, maps document names to template names. 158 | #html_sidebars = {} 159 | 160 | # Additional templates that should be rendered to pages, maps page names to 161 | # template names. 162 | #html_additional_pages = {} 163 | 164 | # If false, no module index is generated. 165 | #html_domain_indices = True 166 | 167 | # If false, no index is generated. 168 | #html_use_index = True 169 | 170 | # If true, the index is split into individual pages for each letter. 171 | #html_split_index = False 172 | 173 | # If true, links to the reST sources are added to the pages. 174 | #html_show_sourcelink = True 175 | 176 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
177 | #html_show_sphinx = True 178 | 179 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 180 | #html_show_copyright = True 181 | 182 | # If true, an OpenSearch description file will be output, and all pages will 183 | # contain a tag referring to it. The value of this option must be the 184 | # base URL from which the finished HTML is served. 185 | #html_use_opensearch = '' 186 | 187 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 188 | #html_file_suffix = None 189 | 190 | # Output file base name for HTML help builder. 191 | htmlhelp_basename = 'rcdoc' 192 | 193 | 194 | # -- Options for LaTeX output --------------------------------------------- 195 | 196 | latex_elements = { 197 | # The paper size ('letterpaper' or 'a4paper'). 198 | #'papersize': 'letterpaper', 199 | 200 | # The font size ('10pt', '11pt' or '12pt'). 201 | #'pointsize': '10pt', 202 | 203 | # Additional stuff for the LaTeX preamble. 204 | #'preamble': '', 205 | } 206 | 207 | # Grouping the document tree into LaTeX files. List of tuples 208 | # (source start file, target name, title, 209 | # author, documentclass [howto, manual, or own class]). 210 | latex_documents = [ 211 | ('index', 'rc.tex', u'rc Documentation', 212 | u'Shipeng Feng', 'manual'), 213 | ] 214 | 215 | # The name of an image file (relative to this directory) to place at the top of 216 | # the title page. 217 | #latex_logo = None 218 | 219 | # For "manual" documents, if this is true, then toplevel headings are parts, 220 | # not chapters. 221 | #latex_use_parts = False 222 | 223 | # If true, show page references after internal links. 224 | #latex_show_pagerefs = False 225 | 226 | # If true, show URL addresses after external links. 227 | #latex_show_urls = False 228 | 229 | # Documents to append as an appendix to all manuals. 230 | #latex_appendices = [] 231 | 232 | # If false, no module index is generated. 
233 | #latex_domain_indices = True 234 | 235 | 236 | # -- Options for manual page output --------------------------------------- 237 | 238 | # One entry per manual page. List of tuples 239 | # (source start file, name, description, authors, manual section). 240 | man_pages = [ 241 | ('index', 'rc', u'rc Documentation', 242 | [u'Shipeng Feng'], 1) 243 | ] 244 | 245 | # If true, show URL addresses after external links. 246 | #man_show_urls = False 247 | 248 | 249 | # -- Options for Texinfo output ------------------------------------------- 250 | 251 | # Grouping the document tree into Texinfo files. List of tuples 252 | # (source start file, target name, title, author, 253 | # dir menu entry, description, category) 254 | texinfo_documents = [ 255 | ('index', 'rc', u'rc Documentation', 256 | u'Shipeng Feng', 'rc', 'One line description of project.', 257 | 'Miscellaneous'), 258 | ] 259 | 260 | # Documents to append as an appendix to all manuals. 261 | #texinfo_appendices = [] 262 | 263 | # If false, no module index is generated. 264 | #texinfo_domain_indices = True 265 | 266 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 267 | #texinfo_show_urls = 'footnote' 268 | 269 | # If true, do not generate a @detailmenu in the "Top" node's menu. 270 | #texinfo_no_detailmenu = False 271 | 272 | 273 | # Example configuration for intersphinx: refer to the Python standard library. 274 | intersphinx_mapping = { 275 | 'http://docs.python.org/': None, 276 | 'https://redis-py.readthedocs.org/en/latest/': None, 277 | } 278 | -------------------------------------------------------------------------------- /docs/foreword.rst: -------------------------------------------------------------------------------- 1 | .. _foreword: 2 | 3 | Foreword 4 | ======== 5 | 6 | Read this before you get started if you are not familiar with cache. 7 | 8 | 9 | Why 10 | --- 11 | 12 | Speed. The main problem with many applications is, they're slow. 
Each time 13 | we get the result, a lot of code is executed. And cache is the easiest way 14 | to speed up things. If you are serious about performance, use more caching. 15 | 16 | 17 | How Caching Works 18 | ----------------- 19 | 20 | What does a cache do? Imagine we have a function that takes some time to 21 | complete, the idea is that we put the result of that expensive operation into 22 | a cache for some time. 23 | 24 | Basically we have a cache object that is connected to a remote server or file 25 | system or memory. When the request comes in, you check if the result is 26 | in the cache, if so, you return it from the cache. Otherwise you execute the 27 | calculation and put it in the cache. 28 | 29 | Here is a simple example:: 30 | 31 | def get_result(): 32 | rv = cache.get('mykey') 33 | if rv is None: 34 | rv = calculate_result() 35 | cache.set('mykey', rv) 36 | return rv 37 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | rc: the redis cache 2 | =================== 3 | 4 | .. image:: _static/rc.png 5 | :alt: RC: the redis cache 6 | :class: floatingflask 7 | 8 | Welcome to rc's documentation. Caching can be fun and easy. This is one 9 | library that implements a cache system for redis in Python. It is for use 10 | with web applications and Python scripts. It comes with really handy apis 11 | for easy drop-in use with common tasks, including caching decorators. It 12 | can be used to build a cache cluster which has a routing system that allows 13 | you to automatically set cache on different servers, sharding can be really 14 | easy now. You can use it to batch fetch multiple cache results back, for 15 | a cluster, it even does the job in parallel, we fetch results from all servers 16 | concurrently, which means much higher performance. 
17 | 18 | It uses the `redis`_ server, which is an in-memory key-value data structure 19 | server. It does not implement any other backends like filesystem and does not 20 | intend to do so. Mostly we want to use a key-value server like redis, if you 21 | have special needs, it is easy to write one cache decorator that stores 22 | everything in memory using a dict or you can check out other libraries. 23 | 24 | 25 | .. _redis: http://redis.io/ 26 | 27 | 28 | User's Guide 29 | ------------ 30 | 31 | This part of the documentation begins with installation, followed by more 32 | instructions for doing cache with rc. 33 | 34 | .. toctree:: 35 | :maxdepth: 2 36 | 37 | foreword 38 | installation 39 | quickstart 40 | tutorial 41 | cache 42 | cache_config 43 | cache_cluster_config 44 | serializer 45 | redis_cluster_router 46 | testing 47 | patterns/index 48 | 49 | 50 | API Reference 51 | ------------- 52 | 53 | This part of the documentation contains information on a specific function, 54 | class or method. 55 | 56 | .. toctree:: 57 | :maxdepth: 2 58 | 59 | api 60 | 61 | 62 | Additional Stuff 63 | ---------------- 64 | 65 | Changelog and license here if you are interested. 66 | 67 | .. toctree:: 68 | :maxdepth: 2 69 | 70 | changelog 71 | license 72 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. _installation: 2 | 3 | Installation 4 | ============ 5 | 6 | Want to give rc a try quickly? Let's start by installing it, you need Python 7 | 2.6 or newer. 8 | 9 | 10 | virtualenv 11 | ---------- 12 | 13 | Virtualenv might be something you want to use for development! If you do not 14 | have it yet, try the following command:: 15 | 16 | $ sudo pip install virtualenv 17 | 18 | Since we have virtualenv installed now, let's create one working environment:: 19 | 20 | $ mkdir myproject 21 | $ cd myproject 22 | $ virtualenv venv 23 | $ . 
venv/bin/activate 24 | 25 | It is time to get rc:: 26 | 27 | $ pip install rc 28 | 29 | Done! 30 | 31 | 32 | System Wide 33 | ----------- 34 | 35 | Install it for all users on the system:: 36 | 37 | $ sudo pip install rc 38 | 39 | 40 | Development Version 41 | ------------------- 42 | 43 | Try the latest version:: 44 | 45 | $ . venv/bin/activate 46 | $ git clone http://github.com/fengsp/rc.git 47 | $ cd rc 48 | $ python setup.py develop 49 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | .. _license: 2 | 3 | License 4 | ======= 5 | 6 | RC is licensed under BSD License. 7 | 8 | .. include:: ../LICENSE 9 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. 
latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 
84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\rc.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\rc.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 
145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 
214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /docs/patterns/index.rst: -------------------------------------------------------------------------------- 1 | .. _patterns: 2 | 3 | Patterns On Caching 4 | =================== 5 | 6 | This part contains some snippets and patterns for caching. 7 | 8 | .. toctree:: 9 | :maxdepth: 2 10 | 11 | memory_cache 12 | -------------------------------------------------------------------------------- /docs/patterns/memory_cache.rst: -------------------------------------------------------------------------------- 1 | .. _memory_cache: 2 | 3 | Cache In Memory 4 | =============== 5 | 6 | RC doesn't support other backends, because mostly you want to use a cache 7 | server. 
But if you really need to put some cache in memory, it should be 8 | easy:: 9 | 10 | from functools import wraps 11 | 12 | def cache(func): 13 | saved = {} 14 | @wraps(func) 15 | def newfunc(*args): 16 | if args in saved: 17 | return saved[args] 18 | result = func(*args) 19 | saved[args] = result 20 | return result 21 | return newfunc 22 | 23 | @cache 24 | def lookup(url): 25 | return url 26 | -------------------------------------------------------------------------------- /docs/quickstart.rst: -------------------------------------------------------------------------------- 1 | .. _quickstart: 2 | 3 | Quickstart 4 | ========== 5 | 6 | This page gives an introduction to RC. 7 | 8 | 9 | A Simple Example 10 | ---------------- 11 | 12 | A minimal cache example looks like this:: 13 | 14 | from rc import Cache 15 | 16 | cache = Cache() 17 | assert cache.set('key', 'value') 18 | assert cache.get('key') == 'value' 19 | assert cache.get('foo') is None 20 | assert cache.set('list', [1]) 21 | assert cache.get('list') == [1] 22 | 23 | What are we doing here? 24 | 25 | 1. First we imported the :class:`~rc.Cache` class. An instance of this class 26 | can be used to cache things with a single redis server. 27 | 2. We create one cache instance. 28 | 3. We set and get things based on a key. 29 | 30 | 31 | Build A Cache Cluster 32 | --------------------- 33 | 34 | A cache cluster uses a redis cluster as backend. 
35 | 36 | :: 37 | 38 | from rc import CacheCluster 39 | 40 | cache = CacheCluster({ 41 | 'cache01': {'host': 'redis-host01'}, 42 | 'cache02': {'host': 'redis-host02'}, 43 | 'cache03': {'host': 'redis-host03'}, 44 | 'cache04': {'host': 'redis-host04', 'db': 1}, 45 | }) 46 | 47 | 48 | Cache Decorator 49 | --------------- 50 | 51 | :: 52 | 53 | @cache.cache() 54 | def load(name, offset): 55 | return load_from_database(name, offset) 56 | 57 | rv = load('name', offset=10) 58 | 59 | 60 | Batch Fetch Multiple Cache Results 61 | ---------------------------------- 62 | 63 | :: 64 | 65 | assert cache.get_many('key', 'foo') == ['value', None] 66 | 67 | # for cache decorated function 68 | @cache.cache() 69 | def cached_func(param): 70 | return param 71 | 72 | results = [] 73 | # with the context manager, the function 74 | # is executed and return a promise 75 | with cache.batch_mode(): 76 | for i in range(10): 77 | results.append(cached_func(i)) 78 | for i, rv in enumerate(results): 79 | assert rv.value == i 80 | 81 | 82 | Cache Invalidation 83 | ------------------ 84 | 85 | :: 86 | 87 | cache.delete('key') 88 | # for decorated function 89 | cache.invalidate(load, 'name', offset=10) 90 | -------------------------------------------------------------------------------- /docs/redis_cluster_router.rst: -------------------------------------------------------------------------------- 1 | .. _redis_cluster_router: 2 | 3 | Redis Cluster Router 4 | ==================== 5 | 6 | This page gives you introductions on redis router for cluster. 7 | 8 | 9 | CRC32Hash Router 10 | ---------------- 11 | 12 | Router that just routes commands to redis node based on ``crc32 % node_num``. 13 | For more details check out :class:`~rc.RedisCRC32HashRouter`. 14 | 15 | 16 | ConsistentHash Router 17 | --------------------- 18 | 19 | Router that routes to redis based on consistent hashing algorithm. 20 | For more details check out :class:`~rc.RedisConsistentHashRouter`. 
21 | 22 | 23 | Build Your Own Router 24 | --------------------- 25 | 26 | Subclass :class:`~rc.BaseRedisRouter`, implement 27 | :meth:`~rc.BaseRedisRouter.get_host_for_key`. 28 | 29 | Here is the builtin CRC32 router:: 30 | 31 | from binascii import crc32 32 | 33 | 34 | class RedisCRC32HashRouter(BaseRedisRouter): 35 | """Use crc32 for hash partitioning.""" 36 | 37 | def __init__(self, hosts): 38 | BaseRedisRouter.__init__(self, hosts) 39 | self._sorted_host_names = sorted(hosts.keys()) 40 | 41 | def get_host_for_key(self, key): 42 | if isinstance(key, unicode): 43 | key = key.encode('utf-8') 44 | else: 45 | key = str(key) 46 | pos = crc32(key) % len(self._sorted_host_names) 47 | return self._sorted_host_names[pos] 48 | -------------------------------------------------------------------------------- /docs/serializer.rst: -------------------------------------------------------------------------------- 1 | .. _serializer: 2 | 3 | Serializer 4 | ========== 5 | 6 | This page we talk about serializers. 7 | 8 | 9 | JSON Serializer 10 | --------------- 11 | 12 | It is simple and fast. The downside is that it cannot serialize enough types 13 | of Python objects. For more details check out :class:`~rc.JSONSerializer`. 14 | 15 | 16 | Pickle Serializer 17 | ----------------- 18 | 19 | More Python types are supported. The downside is that it might be slower than 20 | JSON, unpickling can run arbitrary code, and using `pickle` to transfer data 21 | between programs in different languages is almost impossible, check out 22 | :class:`~rc.PickleSerializer`. 23 | 24 | 25 | Build Your Own Serializer 26 | ------------------------- 27 | 28 | Subclass :class:`~rc.BaseSerializer`, implement 29 | :meth:`~rc.BaseSerializer.dumps` and :meth:`~rc.BaseSerializer.loads`. 
30 | 31 | Here is one simple example:: 32 | 33 | import json 34 | 35 | from rc.serializer import BaseSerializer 36 | 37 | class IterEncoder(json.JSONEncoder): 38 | 39 | def default(self, o): 40 | try: 41 | iterable = iter(o) 42 | except TypeError: 43 | pass 44 | else: 45 | return list(iterable) 46 | return json.JSONEncoder.default(self, o) 47 | 48 | class MyJSONSerializer(BaseSerializer): 49 | """One serializer that uses JSON and support arbitrary iterators""" 50 | 51 | def dumps(self, obj): 52 | return json.dumps(obj, cls=IterEncoder) 53 | 54 | def loads(self, string): 55 | if string is None: 56 | return 57 | return json.loads(string) 58 | -------------------------------------------------------------------------------- /docs/testing.rst: -------------------------------------------------------------------------------- 1 | .. _testing: 2 | 3 | Testing 4 | ======= 5 | 6 | Testing applications that use RC. 7 | 8 | 9 | Null Cache 10 | ---------- 11 | 12 | Simple idea, just create one :class:`~rc.NullCache` object that does not 13 | cache at all when you are doing unit test. 14 | 15 | 16 | Fake Redis 17 | ---------- 18 | 19 | Use a fake redis as backend, this is existing for testing purposes only. 20 | It depends on the `fakeredis`_ library, install it first:: 21 | 22 | $ pip install fakeredis 23 | 24 | .. _fakeredis: https://github.com/jamesls/fakeredis 25 | 26 | For more details, check out :class:`~rc.FakeRedisCache`. 27 | -------------------------------------------------------------------------------- /docs/tutorial.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial: 2 | 3 | Tutorial 4 | ======== 5 | 6 | In this tutorial, we will create a simple blog application. You can learn 7 | to cache with rc and Python here. In our blog application, anyone can add or 8 | update a post, view all posts. 9 | 10 | 11 | Create Skeleton 12 | --------------- 13 | 14 | For this simple web application, we choose to use `Flask`_. 
Here is the basic 15 | skeleton:: 16 | 17 | # -*- coding: utf-8 -*- 18 | import time 19 | 20 | from flask import Flask 21 | from flask import request, url_for, redirect, abort, render_template_string 22 | from flask_sqlalchemy import SQLAlchemy 23 | from rc import Cache 24 | 25 | 26 | app = Flask(__name__) 27 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' 28 | db = SQLAlchemy(app) 29 | cache = Cache() 30 | 31 | 32 | def init_db(): 33 | db.create_all() 34 | 35 | 36 | if __name__ == '__main__': 37 | init_db() 38 | app.run() 39 | 40 | .. _Flask: http://flask.pocoo.org/ 41 | 42 | 43 | Create Models 44 | ------------- 45 | 46 | Let's declare the models and create the database schema here:: 47 | 48 | class Post(db.Model): 49 | id = db.Column(db.Integer, primary_key=True) 50 | title = db.Column(db.String(100), nullable=False) 51 | content = db.Column(db.Text, nullable=False) 52 | created_ts = db.Column(db.Integer, nullable=False) 53 | updated_ts = db.Column(db.Integer, nullable=False) 54 | 55 | def __init__(self, title, content, created_ts, updated_ts): 56 | self.title = title 57 | self.content = content 58 | self.created_ts = created_ts 59 | self.updated_ts = updated_ts 60 | 61 | def __repr__(self): 62 | return '' % self.id 63 | 64 | @staticmethod 65 | def add(title, content): 66 | current_ts = int(time.time()) 67 | post = Post(title, content, current_ts, current_ts) 68 | db.session.add(post) 69 | db.session.commit() 70 | cache.invalidate(Post.get_by_id, post.id) 71 | cache.invalidate(Post.get_all_ids) 72 | 73 | @staticmethod 74 | def update(post_id, title, content): 75 | post = Post.query.get(post_id) 76 | post.title = title 77 | post.content = content 78 | post.updated_ts = int(time.time()) 79 | db.session.commit() 80 | cache.invalidate(Post.get_by_id, post.id) 81 | 82 | @staticmethod 83 | @cache.cache() 84 | def get_all_ids(): 85 | posts = Post.query.all() 86 | return [post.id for post in posts] 87 | 88 | @staticmethod 89 | @cache.cache() 90 | def 
get_by_id(post_id): 91 | post = Post.query.get(post_id) 92 | return dict(id=post.id, title=post.title, content=post.content, 93 | created_ts=post.created_ts, updated_ts=post.updated_ts) 94 | 95 | 96 | View Functions 97 | -------------- 98 | 99 | We will have four view functions here, they are used to add or update or view 100 | a single post, view all posts. The code explains itself:: 101 | 102 | @app.route('/add', methods=['POST']) 103 | def add_post(): 104 | title = request.form['title'] 105 | content = request.form['content'] 106 | Post.add(title, content) 107 | return redirect(url_for('show_all_posts')) 108 | 109 | 110 | @app.route('/post/') 111 | def show_post(post_id): 112 | post = Post.get_by_id(post_id) 113 | if post is None: 114 | abort(404) 115 | return render_template_string(SHOW_POST_TEMPLATE, post=post) 116 | 117 | 118 | @app.route('/post/', methods=['POST']) 119 | def edit_post(post_id): 120 | post = Post.get_by_id(post_id) 121 | if post is None: 122 | abort(404) 123 | title = request.form['title'] 124 | content = request.form['content'] 125 | Post.update(post_id, title, content) 126 | return redirect(url_for('show_all_posts')) 127 | 128 | 129 | @app.route('/') 130 | def show_all_posts(): 131 | all_post_ids = Post.get_all_ids() 132 | all_posts = [] 133 | with cache.batch_mode(): 134 | for post_id in all_post_ids: 135 | all_posts.append(Post.get_by_id(post_id)) 136 | all_posts = [p.value for p in all_posts] 137 | return render_template_string(ALL_POSTS_TEMPLATE, all_posts=all_posts) 138 | 139 | 140 | Add The Templates 141 | ----------------- 142 | 143 | The template for showing all posts is here. 144 | 145 | .. sourcecode:: html+jinja 146 | 147 | 148 | 149 | 150 | Blog 151 | 152 | 153 |

Blog

154 |
155 |
156 |
Title: 157 |
158 |
Content: 159 |
160 |
161 |
162 |
163 | 172 | 173 | 174 | 175 | The template for showing one post is here. 176 | 177 | .. sourcecode:: html+jinja 178 | 179 | 180 | 181 | 182 | {{ post.title }} 183 | 184 | 185 |

{{ post.title }}

186 |

{{ post.content }}

187 |
188 |
189 |
Title: 190 |
191 |
Content: 192 |
193 |
194 |
195 |
196 | 197 | 198 | 199 | If you want the full source code check out the `tutorial source`_. 200 | 201 | .. _tutorial source: 202 | https://github.com/fengsp/rc/tree/master/examples/tutorial.py 203 | -------------------------------------------------------------------------------- /examples/tutorial.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import time 3 | 4 | from flask import Flask 5 | from flask import request, url_for, redirect, abort, render_template_string 6 | from flask_sqlalchemy import SQLAlchemy 7 | from rc import Cache 8 | 9 | 10 | app = Flask(__name__) 11 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' 12 | db = SQLAlchemy(app) 13 | cache = Cache() 14 | 15 | 16 | def init_db(): 17 | db.create_all() 18 | 19 | 20 | class Post(db.Model): 21 | id = db.Column(db.Integer, primary_key=True) 22 | title = db.Column(db.String(100), nullable=False) 23 | content = db.Column(db.Text, nullable=False) 24 | created_ts = db.Column(db.Integer, nullable=False) 25 | updated_ts = db.Column(db.Integer, nullable=False) 26 | 27 | def __init__(self, title, content, created_ts, updated_ts): 28 | self.title = title 29 | self.content = content 30 | self.created_ts = created_ts 31 | self.updated_ts = updated_ts 32 | 33 | def __repr__(self): 34 | return '' % self.id 35 | 36 | @staticmethod 37 | def add(title, content): 38 | current_ts = int(time.time()) 39 | post = Post(title, content, current_ts, current_ts) 40 | db.session.add(post) 41 | db.session.commit() 42 | cache.invalidate(Post.get_by_id, post.id) 43 | cache.invalidate(Post.get_all_ids) 44 | 45 | @staticmethod 46 | def update(post_id, title, content): 47 | post = Post.query.get(post_id) 48 | post.title = title 49 | post.content = content 50 | post.updated_ts = int(time.time()) 51 | db.session.commit() 52 | cache.invalidate(Post.get_by_id, post.id) 53 | 54 | @staticmethod 55 | @cache.cache() 56 | def get_all_ids(): 57 | posts = Post.query.all() 58 | 
return [post.id for post in posts] 59 | 60 | @staticmethod 61 | @cache.cache() 62 | def get_by_id(post_id): 63 | post = Post.query.get(post_id) 64 | return dict(id=post.id, title=post.title, content=post.content, 65 | created_ts=post.created_ts, updated_ts=post.updated_ts) 66 | 67 | 68 | @app.route('/add', methods=['POST']) 69 | def add_post(): 70 | title = request.form['title'] 71 | content = request.form['content'] 72 | Post.add(title, content) 73 | return redirect(url_for('show_all_posts')) 74 | 75 | 76 | @app.route('/post/') 77 | def show_post(post_id): 78 | post = Post.get_by_id(post_id) 79 | if post is None: 80 | abort(404) 81 | return render_template_string(SHOW_POST_TEMPLATE, post=post) 82 | 83 | 84 | @app.route('/post/', methods=['POST']) 85 | def edit_post(post_id): 86 | post = Post.get_by_id(post_id) 87 | if post is None: 88 | abort(404) 89 | title = request.form['title'] 90 | content = request.form['content'] 91 | Post.update(post_id, title, content) 92 | return redirect(url_for('show_all_posts')) 93 | 94 | 95 | @app.route('/') 96 | def show_all_posts(): 97 | all_post_ids = Post.get_all_ids() 98 | all_posts = [] 99 | with cache.batch_mode(): 100 | for post_id in all_post_ids: 101 | all_posts.append(Post.get_by_id(post_id)) 102 | all_posts = [p.value for p in all_posts] 103 | return render_template_string(ALL_POSTS_TEMPLATE, all_posts=all_posts) 104 | 105 | 106 | ALL_POSTS_TEMPLATE = """\ 107 | 108 | 109 | 110 | Blog 111 | 112 | 113 |

Blog

114 |
115 |
116 |
Title: 117 |
118 |
Content: 119 |
120 |
121 |
122 |
123 | 132 | 133 | 134 | """ 135 | 136 | 137 | SHOW_POST_TEMPLATE = """\ 138 | 139 | 140 | 141 | {{ post.title }} 142 | 143 | 144 |

{{ post.title }}

145 |

{{ post.content }}

146 |
147 |
148 |
Title: 149 |
150 |
Content: 151 |
152 |
153 |
154 |
155 | 156 | 157 | """ 158 | 159 | 160 | if __name__ == '__main__': 161 | init_db() 162 | app.run() 163 | -------------------------------------------------------------------------------- /logo/rc.psd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fengsp/rc/32c4d4e2cb7ba734b2dbd9bd83bcc85a2f09499f/logo/rc.psd -------------------------------------------------------------------------------- /rc/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | rc 4 | ~~ 5 | 6 | The redis cache. 7 | 8 | :copyright: (c) 2016 by Shipeng Feng. 9 | :license: BSD, see LICENSE for more details. 10 | """ 11 | from rc.cache import Cache, CacheCluster 12 | from rc.serializer import BaseSerializer, JSONSerializer, PickleSerializer 13 | from rc.redis_router import BaseRedisRouter, RedisCRC32HashRouter 14 | from rc.redis_router import RedisConsistentHashRouter 15 | from rc.testing import NullCache, FakeRedisCache 16 | 17 | 18 | __version__ = '0.3.1' 19 | 20 | 21 | __all__ = [ 22 | 'Cache', 'CacheCluster', 23 | 24 | 'BaseSerializer', 'JSONSerializer', 'PickleSerializer', 25 | 26 | 'BaseRedisRouter', 'RedisCRC32HashRouter', 'RedisConsistentHashRouter', 27 | 28 | 'NullCache', 'FakeRedisCache', 29 | ] 30 | -------------------------------------------------------------------------------- /rc/cache.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import inspect 3 | import functools 4 | from itertools import izip 5 | 6 | from rc.redis_clients import RedisClient 7 | from rc.redis_cluster import RedisCluster 8 | from rc.serializer import JSONSerializer 9 | from rc.utils import generate_key_for_cached_func 10 | from rc.promise import Promise 11 | 12 | 13 | #: Running mode for cache 14 | NORMAL_MODE = 0 15 | BATCH_MODE = 1 16 | 17 | 18 | class cached_property(object): 19 | 20 | def 
__init__(self, fget): 21 | self.fget = fget 22 | 23 | def __get__(self, obj, objtype): 24 | rv = obj.__dict__[self.fget.__name__] = self.fget(obj) 25 | return rv 26 | 27 | 28 | class BaseCache(object): 29 | """Baseclass for all redis cache systems. 30 | 31 | :param namespace: a prefix that should be added to all keys 32 | :param serializer_cls: the serialization class you want to use. 33 | :param default_expire: default expiration time that is used if no 34 | expire specified on :meth:`~rc.cache.BaseCache.set`. 35 | :param bypass_values: a list of return values that would be ignored by the 36 | cache decorator and won't be cached at all. 37 | 38 | .. versionadded:: 0.3 39 | The `bypass_values` parameter was added. 40 | """ 41 | 42 | def __init__(self, namespace=None, serializer_cls=None, 43 | default_expire=3 * 24 * 3600, bypass_values=[]): 44 | if serializer_cls is None: 45 | serializer_cls = JSONSerializer 46 | self.namespace = namespace or '' 47 | self.serializer_cls = serializer_cls 48 | self.default_expire = default_expire 49 | self.bypass_values = bypass_values 50 | self._running_mode = NORMAL_MODE 51 | self._pending_operations = [] 52 | 53 | def get_client(self): 54 | """Returns the redis client that is used for cache.""" 55 | raise NotImplementedError() 56 | 57 | @cached_property 58 | def client(self): 59 | """Returns the redis client that is used for cache.""" 60 | return self.get_client() 61 | 62 | @cached_property 63 | def serializer(self): 64 | """Returns the serializer instance that is used for cache.""" 65 | return self.serializer_cls() 66 | 67 | def _raw_get(self, key): 68 | return self.client.get(self.namespace + key) 69 | 70 | def _raw_set(self, key, string, expire=None): 71 | if expire is None: 72 | expire = self.default_expire 73 | return self.client.setex(self.namespace + key, expire, string) 74 | 75 | def _raw_get_many(self, *keys): 76 | if not keys: 77 | return [] 78 | if self.namespace: 79 | keys = [self.namespace + key for key in keys] 80 | 
return self.client.mget(keys) 81 | 82 | def get(self, key): 83 | """Returns the value for the cache key, otherwise `None` is returned. 84 | 85 | :param key: cache key 86 | """ 87 | return self.serializer.loads(self._raw_get(key)) 88 | 89 | def set(self, key, value, expire=None): 90 | """Adds or overwrites key/value to the cache. The value expires in 91 | time seconds. 92 | 93 | :param key: cache key 94 | :param value: value for the key 95 | :param expire: expiration time 96 | :return: Whether the key has been set 97 | """ 98 | return self._raw_set(key, self.serializer.dumps(value), expire) 99 | 100 | def delete(self, key): 101 | """Deletes the value for the cache key. 102 | 103 | :param key: cache key 104 | :return: Whether the key has been deleted 105 | """ 106 | return self.client.delete(self.namespace + key) 107 | 108 | def get_many(self, *keys): 109 | """Returns the a list of values for the cache keys.""" 110 | return [self.serializer.loads(s) for s in self._raw_get_many(*keys)] 111 | 112 | def set_many(self, mapping, expire=None): 113 | """Sets multiple keys and values using dictionary. 114 | The values expires in time seconds. 115 | 116 | :param mapping: a dictionary with key/values to set 117 | :param expire: expiration time 118 | :return: whether all keys has been set 119 | """ 120 | if not mapping: 121 | return True 122 | rv = True 123 | for key, value in mapping.iteritems(): 124 | if not self.set(key, value, expire): 125 | rv = False 126 | return rv 127 | 128 | def delete_many(self, *keys): 129 | """Deletes multiple keys. 130 | 131 | :return: whether all keys has been deleted 132 | """ 133 | if not keys: 134 | return True 135 | return all(self.delete(key) for key in keys) 136 | 137 | def cache(self, key_prefix=None, expire=None, include_self=False): 138 | """A decorator that is used to cache a function with supplied 139 | parameters. 
It is intended for decorator usage:: 140 | 141 | @cache.cache() 142 | def load(name): 143 | return load_from_database(name) 144 | 145 | rv = load('foo') 146 | rv = load('foo') # returned from cache 147 | 148 | The cache key doesn't need to be specified, it will be created with 149 | the name of the module + the name of the function + function arguments. 150 | 151 | :param key_prefix: this is used to ensure cache result won't clash 152 | with another function that has the same name 153 | in this module, normally you do not need to pass 154 | this in 155 | :param expire: expiration time 156 | :param include_self: whether to include the `self` or `cls` as 157 | cache key for method or not, default to be False 158 | 159 | .. note:: 160 | 161 | The function being decorated must be called with the same 162 | positional and keyword arguments. Otherwise, you might create 163 | multiple caches. If you pass one parameter as positional, do it 164 | always. 165 | 166 | .. note:: 167 | 168 | Using objects as part of the cache key is possible, though it is 169 | suggested to not pass in an object instance as parameter. We 170 | perform a str() on the passed in objects so that you can provide 171 | a __str__ function that returns a identifying string for that 172 | object, the unique string will be used as part of the cache key. 173 | 174 | .. note:: 175 | 176 | When a method on a class is decorated, the ``self`` or ``cls`` 177 | arguments is not included in the cache key. Starting from 0.2 178 | you can control it with `include_self`. If you set 179 | `include_self` to True, remember to provide `__str__` method 180 | for the object, otherwise you might encounter random behavior. 181 | 182 | .. versionadded:: 0.2 183 | The `include_self` parameter was added. 
184 | """ 185 | def decorator(f): 186 | argspec = inspect.getargspec(f) 187 | if argspec and argspec[0] and argspec[0][0] in ('self', 'cls'): 188 | has_self = True 189 | else: 190 | has_self = False 191 | 192 | @functools.wraps(f) 193 | def wrapper(*args, **kwargs): 194 | cache_args = args 195 | # handle self and cls 196 | if has_self: 197 | if not include_self: 198 | cache_args = args[1:] 199 | cache_key = generate_key_for_cached_func( 200 | key_prefix, f, *cache_args, **kwargs) 201 | if self._running_mode == BATCH_MODE: 202 | promise = Promise() 203 | self._pending_operations.append( 204 | (f, args, kwargs, promise, cache_key, expire)) 205 | return promise 206 | rv = self._raw_get(cache_key) 207 | if rv is None: 208 | value = f(*args, **kwargs) 209 | rv = self.serializer.dumps(value) 210 | if value not in self.bypass_values: 211 | self._raw_set(cache_key, rv, expire) 212 | return self.serializer.loads(rv) 213 | 214 | wrapper.__rc_cache_params__ = { 215 | 'key_prefix': key_prefix, 216 | 'expire': expire, 217 | 'include_self': include_self, 218 | } 219 | return wrapper 220 | return decorator 221 | 222 | def invalidate(self, func, *args, **kwargs): 223 | """Invalidate a cache decorated function. You must call this with 224 | the same positional and keyword arguments as what you did when you 225 | call the decorated function, otherwise the cache will not be deleted. 
226 | The usage is simple:: 227 | 228 | @cache.cache() 229 | def load(name, limit): 230 | return load_from_database(name, limit) 231 | 232 | rv = load('foo', limit=5) 233 | 234 | cache.invalidate(load, 'foo', limit=5) 235 | 236 | :param func: decorated function to invalidate 237 | :param args: same positional arguments as you call the function 238 | :param kwargs: same keyword arguments as you call the function 239 | :return: whether it is invalidated or not 240 | """ 241 | try: 242 | cache_params = func.__rc_cache_params__ 243 | except AttributeError: 244 | raise TypeError('Attempted to invalidate a function that is' 245 | 'not cache decorated') 246 | key_prefix = cache_params['key_prefix'] 247 | cache_args = args 248 | include_self = cache_params.get('include_self', False) 249 | if include_self: 250 | instance_self = getattr(func, '__self__', None) 251 | if instance_self: 252 | cache_args = tuple([instance_self] + list(args)) 253 | cache_key = generate_key_for_cached_func( 254 | key_prefix, func, *cache_args, **kwargs) 255 | return self.delete(cache_key) 256 | 257 | def batch_mode(self): 258 | """Returns a context manager for cache batch mode. This is used 259 | to batch fetch results of cache decorated functions. All results 260 | returned by cache decorated function will be 261 | :class:`~rc.promise.Promise` object. This context manager runs the 262 | batch fetch and then resolves all promises in the end. Example:: 263 | 264 | results = [] 265 | with cache.batch_mode(): 266 | for i in range(10): 267 | results.append(get_result(i)) 268 | results = map(lambda r: r.value, results) 269 | 270 | .. note:: 271 | 272 | When you are using rc on this mode, rc is not thread safe. 
273 | """ 274 | return BatchManager(self) 275 | 276 | def batch(self, cancel=False): 277 | if self._running_mode != BATCH_MODE: 278 | raise RuntimeError('You have to batch on batch mode.') 279 | pending_operations = self._pending_operations 280 | self._pending_operations = [] 281 | self._running_mode = NORMAL_MODE 282 | if cancel: 283 | return 284 | cache_keys = [] 285 | for f, args, kwargs, promise, cache_key, expire in pending_operations: 286 | cache_keys.append(cache_key) 287 | cache_results = self._raw_get_many(*cache_keys) 288 | for rv, (func, args, kwargs, promise, cache_key, expire) in izip( 289 | cache_results, pending_operations): 290 | if rv is None: 291 | value = func(*args, **kwargs) 292 | rv = self.serializer.dumps(value) 293 | if value not in self.bypass_values: 294 | self._raw_set(cache_key, rv, expire) 295 | promise.resolve(self.serializer.loads(rv)) 296 | 297 | 298 | class Cache(BaseCache): 299 | """Uses a single Redis server as backend. 300 | 301 | :param host: address of the Redis, this is compatible with the official 302 | Python StrictRedis cilent (redis-py). 303 | :param port: port number of the Redis server. 304 | :param db: db numeric index of the Redis server. 305 | :param password: password authentication for the Redis server. 306 | :param socket_timeout: socket timeout for the StrictRedis client. 307 | :param namespace: a prefix that should be added to all keys. 308 | :param serializer_cls: the serialization class you want to use. 309 | By default, it is :class:`rc.JSONSerializer`. 310 | :param default_expire: default expiration time that is used if no 311 | expire specified on :meth:`set`. 312 | :param redis_options: a dictionary of parameters that are useful for 313 | setting other parameters to the StrictRedis client. 314 | :param bypass_values: a list of return values that would be ignored by the 315 | cache decorator and won't be cached at all. 316 | 317 | .. versionadded:: 0.3 318 | The `bypass_values` parameter was added. 
319 | """ 320 | 321 | def __init__(self, host='localhost', port=6379, db=0, password=None, 322 | socket_timeout=None, namespace=None, serializer_cls=None, 323 | default_expire=3 * 24 * 3600, redis_options=None, 324 | bypass_values=[]): 325 | BaseCache.__init__(self, namespace, serializer_cls, default_expire, 326 | bypass_values) 327 | if redis_options is None: 328 | redis_options = {} 329 | self.host = host 330 | self.port = port 331 | self.db = db 332 | self.password = password 333 | self.socket_timeout = socket_timeout 334 | self.redis_options = redis_options 335 | 336 | def get_client(self): 337 | return RedisClient(host=self.host, port=self.port, db=self.db, 338 | password=self.password, 339 | socket_timeout=self.socket_timeout, 340 | **self.redis_options) 341 | 342 | def set_many(self, mapping, expire=None): 343 | if not mapping: 344 | return True 345 | if expire is None: 346 | expire = self.default_expire 347 | pipe = self.client.pipeline() 348 | for key, value in mapping.iteritems(): 349 | string = self.serializer.dumps(value) 350 | pipe.setex(self.namespace + key, expire, string) 351 | return all(pipe.execute()) 352 | 353 | def delete_many(self, *keys): 354 | if not keys: 355 | return True 356 | if self.namespace: 357 | keys = [self.namespace + key for key in keys] 358 | return self.client.delete(*keys) 359 | 360 | 361 | class CacheCluster(BaseCache): 362 | """The a redis cluster as backend. 363 | 364 | Basic example:: 365 | 366 | cache = CacheCluster({ 367 | 0: {'port': 6379}, 368 | 1: {'port': 6479}, 369 | 2: {'port': 6579}, 370 | 3: {'port': 6679}, 371 | }) 372 | 373 | :param hosts: a dictionary of hosts that maps the host host_name to 374 | configuration parameters. The parameters are used to 375 | construct a :class:`~rc.redis_cluster.HostConfig`. 376 | :param namespace: a prefix that should be added to all keys. 377 | :param serializer_cls: the serialization class you want to use. 378 | By default, it is :class:`~rc.JSONSerializer`. 
379 | :param default_expire: default expiration time that is used if no 380 | expire specified on :meth:`set`. 381 | :param router_cls: use this to override the redis router class, 382 | default to be :class:`~rc.RedisCRC32HashRouter`. 383 | :param router_options: a dictionary of parameters that is useful for 384 | setting other parameters of router 385 | :param pool_cls: use this to override the redis connection pool class, 386 | default to be :class:`~redis.ConnectionPool` 387 | :param pool_options: a dictionary of parameters that is useful for 388 | setting other parameters of pool 389 | :param max_concurrency: defines how many parallel queries can happen 390 | at the same time 391 | :param poller_timeout: for multi key operations we use a select loop as 392 | the parallel query implementation, use this 393 | to specify timeout for the underlying pollers 394 | (select/poll/kqueue/epoll). 395 | :param bypass_values: a list of return values that would be ignored by the 396 | cache decorator and won't be cached at all. 397 | 398 | .. versionadded:: 0.3 399 | The `bypass_values` parameter was added. 
400 | """ 401 | 402 | def __init__(self, hosts, namespace=None, serializer_cls=None, 403 | default_expire=3 * 24 * 3600, router_cls=None, 404 | router_options=None, pool_cls=None, pool_options=None, 405 | max_concurrency=64, poller_timeout=1.0, bypass_values=[]): 406 | BaseCache.__init__(self, namespace, serializer_cls, default_expire, 407 | bypass_values) 408 | self.hosts = hosts 409 | self.router_cls = router_cls 410 | self.router_options = router_options 411 | self.pool_cls = pool_cls 412 | self.pool_options = pool_options 413 | self.max_concurrency = max_concurrency 414 | self.poller_timeout = poller_timeout 415 | 416 | def get_client(self): 417 | redis_cluster = RedisCluster(self.hosts, router_cls=self.router_cls, 418 | router_options=self.router_options, 419 | pool_cls=self.pool_cls, 420 | pool_options=self.pool_options) 421 | return redis_cluster.get_client(self.max_concurrency, 422 | self.poller_timeout) 423 | 424 | def set_many(self, mapping, expire=None): 425 | if not mapping: 426 | return True 427 | if expire is None: 428 | expire = self.default_expire 429 | string_mapping = {} 430 | for key, value in mapping.iteritems(): 431 | string = self.serializer.dumps(value) 432 | string_mapping[self.namespace + key] = string 433 | return self.client.msetex(string_mapping, expire) 434 | 435 | def delete_many(self, *keys): 436 | if not keys: 437 | return True 438 | if self.namespace: 439 | keys = [self.namespace + key for key in keys] 440 | return self.client.mdelete(*keys) 441 | 442 | 443 | class BatchManager(object): 444 | """Context manager that helps us with batching.""" 445 | 446 | def __init__(self, cache): 447 | self.cache = cache 448 | 449 | def __enter__(self): 450 | self.cache._running_mode = BATCH_MODE 451 | return self.cache 452 | 453 | def __exit__(self, exc_type, exc_value, tb): 454 | if exc_type is not None: 455 | self.cache.batch(cancel=True) 456 | else: 457 | self.cache.batch() 458 | 
# ---- rc/ketama.py ----
# -*- coding: utf-8 -*-
import hashlib
import math

from bisect import bisect


def md5_bytes(key):
    # py2: map() returns the 16 digest bytes as a list of ints.
    return map(ord, hashlib.md5(key).digest())


class HashRing(object):
    """Ketama-style consistent hash ring mapping keys to nodes.

    :param nodes: iterable of node identifiers.
    :param weights: optional mapping of node -> integer weight (default 1).
    """

    def __init__(self, nodes=None, weights=None):
        self._nodes = set(nodes or [])
        self._weights = weights if weights else {}

        self._rebuild_circle()

    def _rebuild_circle(self):
        """Rebuild the internal circle from current nodes and weights."""
        self._hashring = {}
        self._sorted_keys = []
        total_weight = 0
        for node in self._nodes:
            total_weight += self._weights.get(node, 1)

        # Guard: if user-supplied weights sum to zero the division below
        # would raise ZeroDivisionError.  Leave the ring empty instead so
        # get_node() degrades gracefully to returning None.
        if not total_weight:
            return

        for node in self._nodes:
            weight = self._weights.get(node, 1)

            # Each node gets a weight-proportional share of 40 positions
            # per node, each position expanded into 4 circle keys below
            # (so ~160 virtual points per node at equal weight).
            ks = math.floor((40 * len(self._nodes) * weight) / total_weight)

            for i in xrange(0, int(ks)):
                k = md5_bytes('%s-%s-salt' % (node, i))

                for l in xrange(0, 4):
                    # Build a 32-bit little-endian int from 4 digest bytes.
                    key = ((k[3 + l * 4] << 24) | (k[2 + l * 4] << 16) |
                           (k[1 + l * 4] << 8) | k[l * 4])
                    self._hashring[key] = node
                    self._sorted_keys.append(key)

        self._sorted_keys.sort()

    def _get_node_pos(self, key):
        # Returns the index into _sorted_keys for this key, or None when
        # the ring is empty.
        if not self._hashring:
            return

        if isinstance(key, unicode):
            # Hash bytes, not unicode (fixed in 0.3.1).
            key = key.encode('utf8')

        k = md5_bytes(key)
        key = (k[3] << 24) | (k[2] << 16) | (k[1] << 8) | k[0]

        nodes = self._sorted_keys
        pos = bisect(nodes, key)

        # Wrap around the circle past the last point.
        if pos == len(nodes):
            return 0
        return pos

    def get_node(self, key):
        """Returns the node responsible for *key*, or None if the ring is
        empty.
        """
        pos = self._get_node_pos(key)
        if pos is None:
            return
        return self._hashring[self._sorted_keys[pos]]


# ---- rc/poller.py ----
# -*- coding: utf-8 -*-
import select


class BasePoller(object):
    """Common interface for the select/poll/kqueue/epoll pollers.  The
    *objects* argument is a list of ``(host_name, obj)`` pairs where each
    obj exposes ``fileno()``.
    """

    is_supported = False

    def __init__(self, objects):
        self.objects = dict(objects)

    def poll(self, timeout=None):
        """The return value is two list of objects that are ready:
        (rlist, wlist).  *timeout* is in seconds for every poller.
        """
        raise NotImplementedError()

    def pop(self, host_name):
        return self.objects.pop(host_name, None)

    def __len__(self):
        return len(self.objects)


class SelectPoller(BasePoller):
    is_supported = hasattr(select, 'select')

    def poll(self, timeout=None):
        objs = self.objects.values()
        rlist, wlist, _ = select.select(objs, objs, [], timeout)
        return rlist, wlist


class PollPoller(BasePoller):
    is_supported = hasattr(select, 'poll')

    def __init__(self, objects):
        BasePoller.__init__(self, objects)
        self.pollobj = select.poll()
        self.fd_to_object = {}
        for _, obj in objects:
            self.pollobj.register(obj.fileno(), select.POLLIN | select.POLLOUT)
            self.fd_to_object[obj.fileno()] = obj

    def pop(self, host_name):
        rv = BasePoller.pop(self, host_name)
        if rv is not None:
            self.pollobj.unregister(rv.fileno())
            self.fd_to_object.pop(rv.fileno(), None)
        return rv

    def poll(self, timeout=None):
        # BUGFIX: select.poll().poll() takes *milliseconds* while callers
        # (and all sibling pollers) use seconds -- previously a 1.0s
        # poller_timeout effectively became 1ms.  None still blocks.
        if timeout is not None:
            timeout = int(timeout * 1000)
        rlist = []
        wlist = []
        for fd, event in self.pollobj.poll(timeout):
            obj = self.fd_to_object[fd]
            # NOTE(review): an fd that is both readable and writable is
            # only reported in rlist here (SelectPoller reports it in
            # both); callers poll in a loop so this appears benign --
            # confirm before relying on wlist completeness.
            if event & select.POLLIN:
                rlist.append(obj)
            elif event & select.POLLOUT:
                wlist.append(obj)
        return rlist, wlist


class KQueuePoller(BasePoller):
    is_supported = hasattr(select, 'kqueue')

    def __init__(self, objects):
        BasePoller.__init__(self, objects)
        self.kqueue = select.kqueue()
        self.events = []
        self.fd_to_object = {}
        for _, obj in objects:
            r_event = select.kevent(
                obj.fileno(), filter=select.KQ_FILTER_READ,
                flags=select.KQ_EV_ADD | select.KQ_EV_ENABLE)
            self.events.append(r_event)
            w_event = select.kevent(
                obj.fileno(), filter=select.KQ_FILTER_WRITE,
                flags=select.KQ_EV_ADD | select.KQ_EV_ENABLE)
            self.events.append(w_event)
            self.fd_to_object[obj.fileno()] = obj

    def pop(self, host_name):
        rv = BasePoller.pop(self, host_name)
        if rv is not None:
            # Drop both the read and write kevents for this fd.
            self.events = [e for e in self.events if e.ident != rv.fileno()]
            self.fd_to_object.pop(rv.fileno(), None)
        return rv

    def poll(self, timeout=None):
        rlist = []
        wlist = []
        events = self.kqueue.control(self.events, 128, timeout)
        for event in events:
            obj = self.fd_to_object.get(event.ident)
            if obj is None:
                continue
            if event.filter == select.KQ_FILTER_READ:
                rlist.append(obj)
            elif event.filter == select.KQ_FILTER_WRITE:
                wlist.append(obj)
        return rlist, wlist


class EpollPoller(BasePoller):
    is_supported = hasattr(select, 'epoll')

    def __init__(self, objects):
        BasePoller.__init__(self, objects)
        self.epoll = select.epoll()
        self.fd_to_object = {}
        for _, obj in objects:
            self.fd_to_object[obj.fileno()] = obj
            self.epoll.register(obj.fileno(), select.EPOLLIN | select.EPOLLOUT)

    def pop(self, host_name):
        rv = BasePoller.pop(self, host_name)
        if rv is not None:
            self.epoll.unregister(rv.fileno())
            self.fd_to_object.pop(rv.fileno(), None)
        return rv

    def poll(self, timeout=None):
        # epoll.poll() takes seconds; -1 means block indefinitely.
        if timeout is None:
            timeout = -1
        rlist = []
        wlist = []
        for fd, event in self.epoll.poll(timeout):
            obj = self.fd_to_object[fd]
            if event & select.EPOLLIN:
                rlist.append(obj)
            elif event & select.EPOLLOUT:
                wlist.append(obj)
        return rlist, wlist


# Best available poller on this platform, in preference order.
supported_pollers = [poller for poller in [EpollPoller, KQueuePoller,
                                           PollPoller, SelectPoller]
                     if poller.is_supported]
poller = supported_pollers[0]
-------------------------------------------------------------------------------- /rc/promise.py: -------------------------------------------------------------------------------- 1 | PENDING_STATE = 0 2 | RESOLVED_STATE = 1 3 | 4 | 5 | class Promise(object): 6 | """A promise object. You can access ``promise.value`` to get the 7 | resolved value. Here is one example:: 8 | 9 | p = Promise() 10 | assert p.is_pending 11 | assert not p.is_resolved 12 | assert p.value is None 13 | p.resolve('value') 14 | assert not p.is_pending 15 | assert p.is_resolved 16 | assert p.value == 'value' 17 | """ 18 | 19 | def __init__(self): 20 | #: the value for this promise if it's resolved 21 | self.value = None 22 | self._state = PENDING_STATE 23 | self._callbacks = [] 24 | 25 | def resolve(self, value): 26 | """Resolves with value.""" 27 | if self._state != PENDING_STATE: 28 | raise RuntimeError('Promise is no longer pending.') 29 | self.value = value 30 | self._state = RESOLVED_STATE 31 | for callback in self._callbacks: 32 | callback(value) 33 | 34 | @property 35 | def is_resolved(self): 36 | """Return `True` if the promise is resolved.""" 37 | return self._state == RESOLVED_STATE 38 | 39 | @property 40 | def is_pending(self): 41 | """Return `True` if the promise is pending.""" 42 | return self._state == PENDING_STATE 43 | 44 | def then(self, on_resolve=None): 45 | """Add one callback that is called with the resolved value when the 46 | promise is resolved, and return the promise itself. 
One demo:: 47 | 48 | p = Promise() 49 | d = {} 50 | p.then(lambda v: d.setdefault('key', v)) 51 | p.resolve('value') 52 | assert p.value == 'value' 53 | assert d['key'] == 'value' 54 | """ 55 | if on_resolve is not None: 56 | if self._state == PENDING_STATE: 57 | self._callbacks.append(on_resolve) 58 | elif self._state == RESOLVED_STATE: 59 | on_resolve(self.value) 60 | return self 61 | 62 | def __repr__(self): 63 | if self._state == PENDING_STATE: 64 | v = '(pending)' 65 | else: 66 | v = repr(self.value) 67 | return '<%s %s>' % ( 68 | self.__class__.__name__, 69 | v, 70 | ) 71 | -------------------------------------------------------------------------------- /rc/redis_clients.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import socket 3 | import errno 4 | from itertools import izip 5 | 6 | from redis import StrictRedis 7 | from redis.client import list_or_args 8 | from redis.exceptions import ConnectionError 9 | try: 10 | from redis.exceptions import TimeoutError 11 | except ImportError: 12 | TimeoutError = ConnectionError 13 | 14 | from rc.poller import poller 15 | 16 | 17 | class BaseRedisClient(StrictRedis): 18 | pass 19 | 20 | 21 | class RedisClient(BaseRedisClient): 22 | pass 23 | 24 | 25 | class RedisClusterClient(BaseRedisClient): 26 | 27 | def __init__(self, connection_pool, max_concurrency=64, 28 | poller_timeout=1.0): 29 | BaseRedisClient.__init__(self, connection_pool=connection_pool) 30 | self.max_concurrency = max_concurrency 31 | self.poller_timeout = poller_timeout 32 | 33 | def execute_command(self, *args, **options): 34 | command_name = args[0] 35 | command_args = args[1:] 36 | connection_pool = self.connection_pool 37 | router = connection_pool.cluster.router 38 | host_name = router.get_host_for_command(command_name, command_args) 39 | connection = connection_pool.get_connection(command_name, host_name) 40 | try: 41 | connection.send_command(*args) 42 | return 
self.parse_response(connection, command_name, **options) 43 | except (ConnectionError, TimeoutError) as e: 44 | connection.disconnect() 45 | if not connection.retry_on_timeout and isinstance(e, TimeoutError): 46 | raise 47 | connection.send_command(*args) 48 | return self.parse_response(connection, command_name, **options) 49 | finally: 50 | connection_pool.release(connection) 51 | 52 | def delete(self, name): 53 | """We just support one key delete for now.""" 54 | names = [name] 55 | return self.execute_command('DEL', *names) 56 | 57 | def mdelete(self, *names): 58 | commands = [] 59 | for name in names: 60 | commands.append(('DEL', name)) 61 | results = self._execute_multi_command_with_poller('DEL', commands) 62 | return sum(results.values()) 63 | 64 | def msetex(self, mapping, time): 65 | commands = [] 66 | for name, value in mapping.iteritems(): 67 | commands.append(('SETEX', name, time, value)) 68 | results = self._execute_multi_command_with_poller('SETEX', commands) 69 | return all(results.values()) 70 | 71 | def mget(self, keys, *args): 72 | args = list_or_args(keys, args) 73 | commands = [] 74 | for arg in args: 75 | commands.append(('MGET', arg)) 76 | results = self._execute_multi_command_with_poller('MGET', commands) 77 | return [results[k] for k in args] 78 | 79 | def _execute_multi_command_with_poller(self, command_name, commands): 80 | connection_pool = self.connection_pool 81 | router = connection_pool.cluster.router 82 | # put command to the corresponding command buffer 83 | bufs = {} 84 | for args in commands: 85 | host_name = router.get_host_for_key(args[1]) 86 | buf = self._get_command_buffer(bufs, command_name, host_name) 87 | buf.enqueue_command(args) 88 | # poll all results back with max concurrency 89 | results = {} 90 | remaining_buf_items = bufs.items() 91 | while remaining_buf_items: 92 | buf_items = remaining_buf_items[:self.max_concurrency] 93 | remaining_buf_items = remaining_buf_items[self.max_concurrency:] 94 | bufs_poll = 
poller(buf_items) 95 | while bufs_poll: 96 | rlist, wlist = bufs_poll.poll(self.poller_timeout) 97 | for rbuf in rlist: 98 | if not rbuf.has_pending_request: 99 | results.update(rbuf.fetch_response(self)) 100 | bufs_poll.pop(rbuf.host_name) 101 | for wbuf in wlist: 102 | if wbuf.has_pending_request: 103 | wbuf.send_pending_request() 104 | # clean 105 | for _, buf in bufs.iteritems(): 106 | connection_pool.release(buf.connection) 107 | return results 108 | 109 | def _get_command_buffer(self, bufs, command_name, host_name): 110 | buf = bufs.get(host_name) 111 | if buf is not None: 112 | return buf 113 | connection_pool = self.connection_pool 114 | connection = connection_pool.get_connection(command_name, host_name) 115 | buf = CommandBuffer(host_name, connection, command_name) 116 | bufs[host_name] = buf 117 | return buf 118 | 119 | 120 | class CommandBuffer(object): 121 | """The command buffer is used for sending and fetching multi key command 122 | related data. 123 | """ 124 | 125 | def __init__(self, host_name, connection, command_name): 126 | self.host_name = host_name 127 | self.connection = connection 128 | self.command_name = command_name 129 | self.commands = [] 130 | self.pending_commands = [] 131 | self._send_buf = [] 132 | 133 | connection.connect() 134 | 135 | def assert_open(self): 136 | if self.connection._sock is None: 137 | raise ValueError('Can not operate on closed file.') 138 | 139 | def enqueue_command(self, command): 140 | self.commands.append(command) 141 | 142 | def fileno(self): 143 | self.assert_open() 144 | return self.connection._sock.fileno() 145 | 146 | @property 147 | def has_pending_request(self): 148 | return self._send_buf or self.commands 149 | 150 | def _try_send_buffer(self): 151 | sock = self.connection._sock 152 | try: 153 | timeout = sock.gettimeout() 154 | sock.setblocking(False) 155 | try: 156 | for i, item in enumerate(self._send_buf): 157 | sent = 0 158 | while 1: 159 | try: 160 | sent = sock.send(item) 161 | except 
socket.error, e: 162 | if e.errno == errno.EAGAIN: 163 | continue 164 | elif e.errno == errno.EWOULDBLOCK: 165 | break 166 | raise 167 | break 168 | if sent < len(item): 169 | self._send_buf[:i + 1] = [item[sent:]] 170 | break 171 | else: 172 | del self._send_buf[:] 173 | finally: 174 | sock.settimeout(timeout) 175 | except socket.timeout: 176 | self.connection.disconnect() 177 | raise TimeoutError('Timeout writing to socket (%s)' 178 | % self.host_name) 179 | except socket.error: 180 | self.connection.disconnect() 181 | raise ConnectionError('Error while writing to socket (%s)' 182 | % self.host_name) 183 | except: 184 | self.connection.disconnect() 185 | raise 186 | 187 | def batch_commands(self, commands): 188 | args = [] 189 | for command in commands: 190 | command_args = command[1:] 191 | args.extend(command_args) 192 | if args: 193 | return [(self.command_name,) + tuple(args)] 194 | else: 195 | return [] 196 | 197 | def send_pending_request(self): 198 | self.assert_open() 199 | if self.commands: 200 | if self.command_name in ('MGET', 'DEL'): 201 | commands = self.batch_commands(self.commands) 202 | else: 203 | commands = self.commands 204 | self._send_buf.extend(self.connection.pack_commands(commands)) 205 | self.pending_commands = self.commands 206 | self.commands = [] 207 | if not self._send_buf: 208 | return True 209 | self._try_send_buffer() 210 | return not self._send_buf 211 | 212 | def fetch_response(self, client): 213 | self.assert_open() 214 | if self.has_pending_request: 215 | raise RuntimeError('There are pending requests.') 216 | if self.command_name in ('MGET', 'DEL'): 217 | rv = client.parse_response(self.connection, self.command_name) 218 | else: 219 | rv = [] 220 | for i in xrange(len(self.pending_commands)): 221 | rv.append(client.parse_response( 222 | self.connection, self.command_name)) 223 | if self.command_name == 'DEL': 224 | rv = [1] * rv + [0] * (len(self.pending_commands) - rv) 225 | pending_keys = map(lambda c: c[1], 
self.pending_commands) 226 | return dict(izip(pending_keys, rv)) 227 | -------------------------------------------------------------------------------- /rc/redis_cluster.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import weakref 3 | 4 | from redis.connection import ConnectionPool, UnixDomainSocketConnection 5 | try: 6 | from redis.connection import SSLConnection 7 | except ImportError: 8 | SSLConnection = None 9 | 10 | from rc.redis_clients import RedisClusterClient 11 | from rc.redis_router import RedisCRC32HashRouter 12 | 13 | 14 | class HostConfig(object): 15 | 16 | def __init__(self, host_name, host='localhost', port=6379, 17 | unix_socket_path=None, db=0, password=None, 18 | ssl=False, ssl_options=None): 19 | self.host_name = host_name 20 | self.host = host 21 | self.port = port 22 | self.unix_socket_path = unix_socket_path 23 | self.db = db 24 | self.password = password 25 | self.ssl = ssl 26 | self.ssl_options = ssl_options 27 | 28 | def __repr__(self): 29 | identity_dict = { 30 | 'host': self.host, 31 | 'port': self.port, 32 | 'unix_socket_path': self.unix_socket_path, 33 | 'db': self.db, 34 | } 35 | return '<%s %s>' % ( 36 | self.__class__.__name__, 37 | ' '.join('%s=%s' % x for x in sorted(identity_dict.items())), 38 | ) 39 | 40 | 41 | class RedisCluster(object): 42 | """The redis cluster is the object that holds the connection pools to 43 | the redis nodes. 44 | 45 | :param hosts: a dictionary of hosts that maps the host host_name to 46 | configuration parameters. The parameters are used to 47 | construct a :class:`~rc.redis_cluster.HostConfig`. 
48 | :param router_cls: use this to override the redis router class 49 | :param router_options: a dictionary of parameters that is useful for 50 | setting other parameters of router 51 | :param pool_cls: use this to override the redis connection pool class 52 | :param pool_options: a dictionary of parameters that is useful for 53 | setting other parameters of pool 54 | """ 55 | 56 | def __init__(self, hosts, router_cls=None, router_options=None, 57 | pool_cls=None, pool_options=None): 58 | if router_cls is None: 59 | router_cls = RedisCRC32HashRouter 60 | if pool_cls is None: 61 | pool_cls = ConnectionPool 62 | if pool_options is None: 63 | pool_options = {} 64 | if router_options is None: 65 | router_options = {} 66 | self.router_cls = router_cls 67 | self.router_options = router_options 68 | self.pool_cls = pool_cls 69 | self.pool_options = pool_options 70 | self.hosts = {} 71 | for host_name, host_config in hosts.iteritems(): 72 | self.hosts[host_name] = HostConfig(host_name, **host_config) 73 | self.router = self.router_cls(self.hosts, **router_options) 74 | #: connection pools of all hosts 75 | self._pools = {} 76 | 77 | def get_pool_of_host(self, host_name): 78 | """Returns the connection pool for a certain host.""" 79 | pool = self._pools.get(host_name) 80 | if pool is not None: 81 | return pool 82 | else: 83 | host_config = self.hosts[host_name] 84 | pool_options = dict(self.pool_options) 85 | pool_options['db'] = host_config.db 86 | pool_options['password'] = host_config.password 87 | if host_config.unix_socket_path is not None: 88 | pool_options['path'] = host_config.unix_socket_path 89 | pool_options['connection_class'] = UnixDomainSocketConnection 90 | else: 91 | pool_options['host'] = host_config.host 92 | pool_options['port'] = host_config.port 93 | if host_config.ssl: 94 | if SSLConnection is None: 95 | raise RuntimeError('SSL connections are not supported') 96 | pool_options['connection_class'] = SSLConnection 97 | 
pool_options.update(host_config.ssl_options or {}) 98 | pool = self.pool_cls(**pool_options) 99 | self._pools[host_name] = pool 100 | return pool 101 | 102 | def get_client(self, max_concurrency=64, poller_timeout=1.0): 103 | """Returns a cluster client. This client can automatically route 104 | the requests to the corresponding node. 105 | 106 | :param max_concurrency: defines how many parallel queries can happen 107 | at the same time 108 | :param poller_timeout: for multi key commands we use a select loop as 109 | the parallel query implementation, use this 110 | to specify timeout for underlying pollers 111 | (select/poll/kqueue/epoll). 112 | """ 113 | return RedisClusterClient( 114 | RedisClusterPool(self), max_concurrency, poller_timeout) 115 | 116 | 117 | class RedisClusterPool(object): 118 | """The cluster pool works with the cluster client to get the correct pool. 119 | """ 120 | 121 | def __init__(self, cluster): 122 | self.cluster = cluster 123 | 124 | def get_connection(self, command_name, host_name): 125 | real_pool = self.cluster.get_pool_of_host(host_name) 126 | connection = real_pool.get_connection(command_name) 127 | connection.__birth_pool = weakref.ref(real_pool) 128 | return connection 129 | 130 | def release(self, connection): 131 | real_pool = connection.__birth_pool() 132 | if real_pool is not None: 133 | real_pool.release(connection) 134 | -------------------------------------------------------------------------------- /rc/redis_router.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from binascii import crc32 3 | 4 | from rc.ketama import HashRing 5 | 6 | 7 | class BaseRedisRouter(object): 8 | """Subclass this to implement your own router.""" 9 | 10 | def __init__(self, hosts): 11 | self.hosts = hosts 12 | 13 | def get_key_for_command(self, command, args): 14 | if command in ('GET', 'SET', 'SETEX', 'DEL'): 15 | return args[0] 16 | raise RuntimeError('The command "%s" is not 
supported yet.' % command) 17 | 18 | def get_host_for_key(self, key): 19 | """Get host name for a certain key.""" 20 | raise NotImplementedError() 21 | 22 | def get_host_for_command(self, command, args): 23 | return self.get_host_for_key(self.get_key_for_command(command, args)) 24 | 25 | 26 | class RedisCRC32HashRouter(BaseRedisRouter): 27 | """Use crc32 for hash partitioning.""" 28 | 29 | def __init__(self, hosts): 30 | BaseRedisRouter.__init__(self, hosts) 31 | self._sorted_host_names = sorted(hosts.keys()) 32 | 33 | def get_host_for_key(self, key): 34 | if isinstance(key, unicode): 35 | key = key.encode('utf-8') 36 | else: 37 | key = str(key) 38 | pos = crc32(key) % len(self._sorted_host_names) 39 | return self._sorted_host_names[pos] 40 | 41 | 42 | class RedisConsistentHashRouter(BaseRedisRouter): 43 | """Use ketama for hash partitioning.""" 44 | 45 | def __init__(self, hosts): 46 | BaseRedisRouter.__init__(self, hosts) 47 | self._hashring = HashRing(hosts.values()) 48 | 49 | def get_host_for_key(self, key): 50 | node = self._hashring.get_node(key) 51 | if node is None: 52 | raise RuntimeError('Can not find a host using consistent hash') 53 | return node.host_name 54 | -------------------------------------------------------------------------------- /rc/serializer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | try: 4 | import cPickle as pickle 5 | except ImportError: 6 | import pickle 7 | 8 | 9 | class BaseSerializer(object): 10 | """Baseclass for serializer. 
Subclass this to get your own serializer.""" 11 | 12 | def dumps(self, obj): 13 | """Dumps an object into a string for redis.""" 14 | raise NotImplementedError() 15 | 16 | def loads(self, string): 17 | """Read a serialized object from a string.""" 18 | raise NotImplementedError() 19 | 20 | 21 | class PickleSerializer(BaseSerializer): 22 | """One serializer that uses Pickle""" 23 | 24 | def dumps(self, obj): 25 | return pickle.dumps(obj) 26 | 27 | def loads(self, string): 28 | if string is None: 29 | return 30 | return pickle.loads(string) 31 | 32 | 33 | class JSONSerializer(BaseSerializer): 34 | """One serializer that uses JSON""" 35 | 36 | def dumps(self, obj): 37 | return json.dumps(obj) 38 | 39 | def loads(self, string): 40 | if string is None: 41 | return 42 | return json.loads(string) 43 | -------------------------------------------------------------------------------- /rc/testing.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from rc.cache import BaseCache 3 | 4 | 5 | class NullCache(BaseCache): 6 | """Use this for unit test. This doesn't cache.""" 7 | 8 | def __init__(self, *args, **kwargs): 9 | BaseCache.__init__(self) 10 | 11 | def get(self, key): 12 | """Always return `None`""" 13 | return 14 | 15 | def set(self, key, value, time=None): 16 | """Always return `True`""" 17 | return True 18 | 19 | def delete(self, key): 20 | """Always return `True`""" 21 | return True 22 | 23 | def get_many(self, *keys): 24 | """Always return a list of `None`""" 25 | return [None for key in keys] 26 | 27 | 28 | class FakeRedisCache(BaseCache): 29 | """Uses a fake redis server as backend. It depends on the 30 | `fakeredis`_ library. 31 | 32 | .. _fakeredis: https://github.com/jamesls/fakeredis 33 | 34 | :param namespace: a prefix that should be added to all keys. 35 | :param serializer_cls: the serialization class you want to use. 36 | By default, it is :class:`rc.JSONSerializer`. 
37 | :param default_expire: default expiration time that is used if no 38 | expire specified on :meth:`set`. 39 | """ 40 | 41 | def __init__(self, namespace=None, serializer_cls=None, 42 | default_expire=3 * 24 * 3600): 43 | BaseCache.__init__(self, namespace, serializer_cls, default_expire) 44 | 45 | def get_client(self): 46 | import fakeredis 47 | return fakeredis.FakeStrictRedis() 48 | -------------------------------------------------------------------------------- /rc/utils.py: -------------------------------------------------------------------------------- 1 | def u_(s): 2 | if isinstance(s, unicode): 3 | return s 4 | if not isinstance(s, str): 5 | s = str(s) 6 | return unicode(s, 'utf-8') 7 | 8 | 9 | def generate_key_for_cached_func(key_prefix, func, *args, **kwargs): 10 | """Generate key for cached function. The cache key will be created with 11 | the name of the module + the name of the function + function arguments. 12 | """ 13 | if key_prefix is None: 14 | key_prefix = [] 15 | else: 16 | key_prefix = [key_prefix] 17 | module_name = func.__module__ 18 | func_name = func.__name__ 19 | # handle keyword arguments 20 | kwargs = kwargs.items() 21 | if kwargs: 22 | kwargs.sort(key=lambda t: t[0]) 23 | kwargs = map(lambda t: (u_(t[0]), u_(t[1])), kwargs) 24 | kwargs = map(lambda t: u'='.join(t), kwargs) 25 | # handle positional arguments 26 | args = map(lambda arg: u_(arg), args) 27 | # join them together 28 | return u' '.join(key_prefix + [module_name, func_name] + args + kwargs) 29 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | from setuptools import setup 3 | 4 | 5 | with open('rc/__init__.py', 'rb') as f: 6 | version = str(eval(re.search(r'__version__\s+=\s+(.*)', 7 | f.read().decode('utf-8')).group(1))) 8 | 9 | 10 | setup( 11 | name='rc', 12 | author='Shipeng Feng', 13 | author_email='fsp261@gmail.com', 14 | 
version=version, 15 | url='http://github.com/fengsp/rc', 16 | packages=['rc'], 17 | description='rc, the redis cache', 18 | install_requires=[ 19 | 'redis>=2.6', 20 | ], 21 | classifiers=[ 22 | 'License :: OSI Approved :: BSD License', 23 | 'Programming Language :: Python', 24 | 'Programming Language :: Python :: 2' 25 | ], 26 | ) 27 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import tempfile 2 | import uuid 3 | import os 4 | import time 5 | import socket 6 | import shutil 7 | from subprocess import Popen, PIPE 8 | 9 | import pytest 10 | 11 | 12 | devnull = open(os.devnull, 'w') 13 | 14 | 15 | class RedisServer(object): 16 | 17 | def __init__(self, socket_path): 18 | self.socket_path = socket_path 19 | self.redis = Popen(['redis-server', '-'], stdin=PIPE, stdout=devnull) 20 | self.redis.stdin.write(''' 21 | port 0 22 | unixsocket %s 23 | save ""''' % socket_path) 24 | self.redis.stdin.flush() 25 | self.redis.stdin.close() 26 | while 1: 27 | try: 28 | s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 29 | s.connect(socket_path) 30 | except IOError: 31 | time.sleep(0.05) 32 | continue 33 | else: 34 | break 35 | 36 | def shutdown(self): 37 | self.redis.kill() 38 | self.redis.wait() 39 | os.remove(self.socket_path) 40 | 41 | def __del__(self): 42 | try: 43 | self.shutdown() 44 | except: 45 | pass 46 | 47 | 48 | @pytest.fixture(scope='session') 49 | def redis_hosts(request): 50 | socket_dir = tempfile.mkdtemp() 51 | hosts = {} 52 | servers = [] 53 | for i in range(4): 54 | socket_path = os.path.join(socket_dir, str(uuid.uuid4())) 55 | server = RedisServer(socket_path) 56 | for j in range(4): 57 | hosts['cache-server-%s' % (i * 4 + j)] = { 58 | 'unix_socket_path': socket_path, 59 | 'db': j, 60 | } 61 | servers.append(server) 62 | 63 | def fin(): 64 | for server in servers: 65 | server.shutdown() 66 | shutil.rmtree(socket_dir) 
67 | request.addfinalizer(fin) 68 | return hosts 69 | 70 | 71 | @pytest.fixture(scope='session') 72 | def redis_unix_socket_path(redis_hosts): 73 | return redis_hosts.values()[0]['unix_socket_path'] 74 | -------------------------------------------------------------------------------- /tests/test_cache.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import pytest 3 | 4 | from rc.cache import Cache, CacheCluster 5 | from rc.testing import NullCache, FakeRedisCache 6 | 7 | 8 | def test_null_cache(): 9 | cache = NullCache() 10 | with pytest.raises(NotImplementedError): 11 | cache.client 12 | assert cache.get('key') is None 13 | assert cache.set('key', 'value') 14 | assert cache.delete('key') 15 | assert cache.get_many('key1', 'key2') == [None, None] 16 | assert cache.set_many({'key1': 'value1', 'key2': 'value2'}) 17 | assert cache.delete_many('key1', 'key2') 18 | 19 | 20 | def test_fakeredis_cache(): 21 | cache = FakeRedisCache() 22 | assert cache.get('key') is None 23 | assert cache.set('key', 'value') 24 | assert cache.get('key') == 'value' 25 | assert cache.delete('key') 26 | assert cache.get_many('key1', 'key2') == [None, None] 27 | assert cache.set_many({'key1': 'value1', 'key2': 'value2'}) 28 | assert cache.delete_many('key1', 'key2') 29 | 30 | 31 | def test_cache_basic_apis(redis_unix_socket_path): 32 | cache = Cache(redis_options={'unix_socket_path': redis_unix_socket_path}) 33 | assert cache.get('key') is None 34 | assert cache.set('key', 'value') 35 | assert cache.get('key') == 'value' 36 | assert cache.delete('key') 37 | assert cache.get('key') is None 38 | 39 | assert cache.get_many('key1', 'key2') == [None, None] 40 | assert cache.set_many({'key1': 'value1', 'key2': 'value2'}) 41 | assert cache.get_many('key1', 'key2') == ['value1', 'value2'] 42 | assert cache.delete_many('key1', 'key2') 43 | assert cache.get_many('key1', 'key2') == [None, None] 44 | assert cache.get_many() == [] 45 | 
assert cache.set_many({}) 46 | assert cache.delete_many() 47 | 48 | assert cache.get('key') is None 49 | assert cache.set('key', ['value']) 50 | assert cache.get('key') == ['value'] 51 | assert cache.get_many('key') == [['value']] 52 | assert cache.delete('key') 53 | assert cache.get('key') is None 54 | 55 | # import time 56 | # assert cache.get('key') is None 57 | # cache.set('key', 'value', 1) 58 | # time.sleep(1) 59 | # assert cache.get('key') is None 60 | 61 | 62 | def test_cache_namespace(redis_unix_socket_path): 63 | cache01 = Cache(redis_options={'unix_socket_path': redis_unix_socket_path}) 64 | cache02 = Cache( 65 | namespace='test:', 66 | redis_options={'unix_socket_path': redis_unix_socket_path}) 67 | assert cache01.set('key', 'value') 68 | assert cache01.get('key') == 'value' 69 | assert cache02.get('key') is None 70 | 71 | 72 | def test_cache_decorator_basic_apis(redis_unix_socket_path): 73 | cache = Cache(redis_options={'unix_socket_path': redis_unix_socket_path}) 74 | 75 | @cache.cache() 76 | def load(name, offset): 77 | return ' '.join(('load', name, offset)) 78 | rv = load('name', 'offset') 79 | assert isinstance(rv, unicode) 80 | assert rv == 'load name offset' 81 | assert load('name', offset='offset') == 'load name offset' 82 | 83 | @cache.cache() 84 | def load(name, offset): 85 | return ' '.join(('load02', name, offset)) 86 | assert load('name', 'offset') == 'load name offset' 87 | assert load('name', offset='offset') == 'load name offset' 88 | assert cache.invalidate(load, 'name', 'offset') 89 | assert load('name', 'offset') == 'load02 name offset' 90 | assert load('name', offset='offset') == 'load name offset' 91 | assert cache.invalidate(load, 'name', offset='offset') 92 | assert load('name', offset='offset') == 'load02 name offset' 93 | 94 | class Foo(object): 95 | @cache.cache() 96 | def load_method(self, name, offset): 97 | return ' '.join(('load', name, str(offset))) 98 | foo = Foo() 99 | assert foo.load_method('name', 10) == 'load name 
10' 100 | assert foo.load_method('name', offset=10) == 'load name 10' 101 | 102 | class Foo(object): 103 | @cache.cache() 104 | def load_method(self, name, offset): 105 | return ' '.join(('load02', name, str(offset))) 106 | foo = Foo() 107 | assert foo.load_method('name', 10) == 'load name 10' 108 | assert cache.invalidate(foo.load_method, 'name', 10) 109 | assert foo.load_method('name', 10) == 'load02 name 10' 110 | 111 | 112 | def test_cache_decorator_include_self(redis_unix_socket_path): 113 | cache = Cache(redis_options={'unix_socket_path': redis_unix_socket_path}) 114 | 115 | class User(object): 116 | def __init__(self, user_id): 117 | self.user_id = user_id 118 | 119 | def __str__(self): 120 | return '' % self.user_id 121 | 122 | @cache.cache(include_self=True) 123 | def load(self, name, offset): 124 | return ' '.join(('load', name, str(offset))) 125 | user01 = User(1) 126 | user02 = User(2) 127 | 128 | assert user01.load('name', 'offset') == 'load name offset' 129 | assert user02.load('name', 'offset') == 'load name offset' 130 | 131 | class User(object): 132 | def __init__(self, user_id): 133 | self.user_id = user_id 134 | 135 | def __str__(self): 136 | return '' % self.user_id 137 | 138 | @cache.cache(include_self=True) 139 | def load(self, name, offset): 140 | return ' '.join(('load02', name, str(offset))) 141 | user01 = User(1) 142 | user02 = User(2) 143 | 144 | assert user01.load('name', 'offset') == 'load name offset' 145 | assert user02.load('name', 'offset') == 'load name offset' 146 | assert cache.invalidate(user01.load, 'name', 'offset') 147 | assert user01.load('name', 'offset') == 'load02 name offset' 148 | assert user02.load('name', 'offset') == 'load name offset' 149 | 150 | 151 | def test_cache_cluster_basic_apis(redis_hosts): 152 | cache = CacheCluster(redis_hosts) 153 | assert cache.get('key') is None 154 | assert cache.set('key', 'value') 155 | assert cache.get('key') == 'value' 156 | assert cache.delete('key') 157 | assert cache.get('key') 
is None 158 | 159 | assert cache.get_many('key1', 'key2') == [None, None] 160 | assert cache.set_many({'key1': 'value1', 'key2': 'value2'}) 161 | assert cache.get_many('key1', 'key2') == ['value1', 'value2'] 162 | assert cache.delete_many('key1', 'key2') 163 | assert cache.get_many('key1', 'key2') == [None, None] 164 | assert cache.get_many() == [] 165 | assert cache.set_many({}) 166 | assert cache.delete_many() 167 | 168 | assert cache.get('key') is None 169 | assert cache.set('key', ['value']) 170 | assert cache.get('key') == ['value'] 171 | assert cache.delete('key') 172 | assert cache.get('key') is None 173 | 174 | # import time 175 | # assert cache.get('key') is None 176 | # cache.set('key', 'value', 1) 177 | # time.sleep(1) 178 | # assert cache.get('key') is None 179 | 180 | 181 | def test_cache_cluster_namespace(redis_hosts): 182 | cache01 = CacheCluster(redis_hosts) 183 | cache02 = CacheCluster(redis_hosts, namespace='test:') 184 | assert cache01.set('key', 'value') 185 | assert cache01.get('key') == 'value' 186 | assert cache02.get('key') is None 187 | 188 | 189 | def test_cache_batch_mode(redis_unix_socket_path): 190 | cache = Cache(redis_options={'unix_socket_path': redis_unix_socket_path}) 191 | 192 | @cache.cache() 193 | def cache_batch_test_func(value): 194 | return value 195 | 196 | with cache.batch_mode(): 197 | pass 198 | 199 | results = [] 200 | with cache.batch_mode(): 201 | for i in range(10): 202 | rv = cache_batch_test_func(i) 203 | assert rv.is_pending 204 | assert rv.value is None 205 | results.append(rv) 206 | for i, rv in enumerate(results): 207 | assert rv.is_resolved 208 | assert rv.value == i 209 | 210 | for i in range(20): 211 | assert cache_batch_test_func(i) == i 212 | 213 | 214 | def test_cache_cluster_batch_mode(redis_hosts): 215 | cache = CacheCluster(redis_hosts) 216 | 217 | @cache.cache() 218 | def cluster_batch_test_func(value): 219 | return value 220 | 221 | for i in range(5): 222 | assert cluster_batch_test_func(i) == i 223 | 
224 | results = [] 225 | with cache.batch_mode(): 226 | for i in range(10): 227 | rv = cluster_batch_test_func(i) 228 | assert rv.is_pending 229 | assert rv.value is None 230 | results.append(rv) 231 | for i, rv in enumerate(results): 232 | assert rv.is_resolved 233 | assert rv.value == i 234 | 235 | for i in range(20): 236 | assert cluster_batch_test_func(i) == i 237 | 238 | 239 | def test_cache_bypass_values(redis_unix_socket_path): 240 | cache = Cache(redis_options={'unix_socket_path': redis_unix_socket_path}, 241 | bypass_values=[None]) 242 | 243 | @cache.cache() 244 | def bypass_values_test_func(): 245 | return None 246 | 247 | assert bypass_values_test_func() is None 248 | 249 | @cache.cache() 250 | def bypass_values_test_func(): 251 | return 1 252 | 253 | assert bypass_values_test_func() is not None 254 | -------------------------------------------------------------------------------- /tests/test_ketama.py: -------------------------------------------------------------------------------- 1 | from rc.ketama import HashRing 2 | 3 | 4 | def test_basic(): 5 | nodes = ['node01', 'node02', 'node03', 'node04'] 6 | hashring = HashRing(nodes) 7 | keys = [u'key-%s' % i for i in range(500)] 8 | keys_nodes = [hashring.get_node(k) for k in keys] 9 | for node in nodes: 10 | assert node in keys_nodes 11 | 12 | keys = ['key-%s' % i for i in range(500, 1000)] 13 | keys_nodes = [hashring.get_node(k) for k in keys] 14 | for node in nodes: 15 | assert node in keys_nodes 16 | -------------------------------------------------------------------------------- /tests/test_poller.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from rc.poller import supported_pollers 4 | from rc import redis_clients 5 | from rc.redis_cluster import RedisCluster 6 | 7 | 8 | @pytest.mark.parametrize('poller', supported_pollers) 9 | def test_all_pollers(redis_hosts, poller, monkeypatch): 10 | monkeypatch.setattr(redis_clients, 'poller', poller) 
11 | 12 | # assert len(supported_pollers) == 4 13 | redis_cluster = RedisCluster(redis_hosts) 14 | cluster_client = redis_cluster.get_client() 15 | keys = [] 16 | for i in range(10): 17 | key = 'key-%s' % i 18 | keys.append(key) 19 | cluster_client.set(key, i) 20 | assert cluster_client.mget(keys) == map(str, range(10)) 21 | -------------------------------------------------------------------------------- /tests/test_promise.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from rc.promise import Promise 4 | 5 | 6 | def test_promise_resolve(): 7 | p = Promise() 8 | assert p.is_pending 9 | assert not p.is_resolved 10 | assert p.value is None 11 | p.resolve('value') 12 | assert not p.is_pending 13 | assert p.is_resolved 14 | assert p.value == 'value' 15 | with pytest.raises(RuntimeError): 16 | p.resolve('value again') 17 | 18 | 19 | def test_promise_then(): 20 | p = Promise() 21 | d = {} 22 | v = p.then(lambda v: d.setdefault('key', v)) 23 | p.resolve('value') 24 | assert v.value == 'value' 25 | assert d['key'] == 'value' 26 | -------------------------------------------------------------------------------- /tests/test_redis_clients.py: -------------------------------------------------------------------------------- 1 | from rc.redis_cluster import RedisCluster 2 | 3 | 4 | def test_redis_cluster_client_basic_operations(redis_hosts): 5 | cluster = RedisCluster(redis_hosts) 6 | client = cluster.get_client() 7 | 8 | keys = [] 9 | for i in xrange(10): 10 | key = 'test key: %s' % i 11 | keys.append(key) 12 | client.set(key, i) 13 | for i, key in enumerate(keys): 14 | assert client.get(key) == str(i) 15 | assert client.mget(keys) == map(str, range(10)) 16 | 17 | keys = [] 18 | for i in xrange(10, 20): 19 | key = 'test key: %s' % i 20 | keys.append(key) 21 | client.setex(key, 100, i) 22 | for i, key in enumerate(keys, 10): 23 | assert client.get(key) == str(i) 24 | assert client.mget(keys) == map(str, range(10, 20)) 
25 | 26 | keys = [] 27 | deleted_keys = [] 28 | for i in xrange(20, 30): 29 | key = 'test key: %s' % i 30 | keys.append(key) 31 | client.setex(key, 100, i) 32 | for i in xrange(20, 25): 33 | key = 'test key: %s' % i 34 | deleted_keys.append(key) 35 | client.delete(key) 36 | for i, key in enumerate(keys[5:], 25): 37 | assert client.get(key) == str(i) 38 | for key in deleted_keys: 39 | assert client.get(key) is None 40 | assert client.mget(keys) == [None] * 5 + map(str, range(25, 30)) 41 | 42 | keys = [] 43 | mapping = {} 44 | deleted_keys = [] 45 | for i in xrange(30, 40): 46 | key = 'test key: %s' % i 47 | mapping[key] = i 48 | keys.append(key) 49 | assert client.msetex(mapping, 100) 50 | for i, key in enumerate(keys, 30): 51 | assert client.get(key) == str(i) 52 | assert client.mget(keys) == map(str, range(30, 40)) 53 | for i in xrange(30, 35): 54 | key = 'test key: %s' % i 55 | deleted_keys.append(key) 56 | assert client.mdelete(*deleted_keys) 57 | for i, key in enumerate(keys[5:], 35): 58 | assert client.get(key) == str(i) 59 | for key in deleted_keys: 60 | assert client.get(key) is None 61 | assert client.mget(keys) == [None] * 5 + map(str, range(35, 40)) 62 | -------------------------------------------------------------------------------- /tests/test_redis_cluster.py: -------------------------------------------------------------------------------- 1 | from rc.redis_cluster import RedisCluster, HostConfig 2 | 3 | 4 | def test_cluster_constructor(): 5 | cluster = RedisCluster({ 6 | 0: {'password': 'pass', 'ssl': True}, 7 | 1: {'host': '127.0.0.1', 'port': 10000, 'db': 1}, 8 | 2: {'unix_socket_path': '/tmp/redis_socket'}, 9 | }) 10 | 11 | assert cluster.hosts[0].host_name == 0 12 | assert cluster.hosts[0].host == 'localhost' 13 | assert cluster.hosts[0].port == 6379 14 | assert cluster.hosts[0].unix_socket_path is None 15 | assert cluster.hosts[0].db == 0 16 | assert cluster.hosts[0].password == 'pass' 17 | assert cluster.hosts[0].ssl is True 18 | assert 
cluster.hosts[0].ssl_options is None 19 | 20 | assert cluster.hosts[1].host_name == 1 21 | assert cluster.hosts[1].host == '127.0.0.1' 22 | assert cluster.hosts[1].port == 10000 23 | assert cluster.hosts[1].unix_socket_path is None 24 | assert cluster.hosts[1].db == 1 25 | assert cluster.hosts[1].password is None 26 | assert cluster.hosts[1].ssl is False 27 | assert cluster.hosts[1].ssl_options is None 28 | 29 | assert cluster.hosts[2].host_name == 2 30 | assert cluster.hosts[2].host == 'localhost' 31 | assert cluster.hosts[2].port == 6379 32 | assert cluster.hosts[2].unix_socket_path == '/tmp/redis_socket' 33 | assert cluster.hosts[2].db == 0 34 | assert cluster.hosts[2].password is None 35 | assert cluster.hosts[2].ssl is False 36 | assert cluster.hosts[2].ssl_options is None 37 | 38 | 39 | def test_host_config(): 40 | host_config_01 = HostConfig('01') 41 | host_config_02 = HostConfig('02', password='pass', ssl=False) 42 | assert repr(host_config_01) == repr(host_config_02) 43 | 44 | 45 | def test_cluster_pools(): 46 | cluster = RedisCluster({ 47 | 0: {'password': 'pass', 'ssl': True}, 48 | 1: {'unix_socket_path': '/tmp/redis_socket'}, 49 | }) 50 | pool_for_0 = cluster.get_pool_of_host(0) 51 | pool_for_1 = cluster.get_pool_of_host(1) 52 | pool_for_0_again = cluster.get_pool_of_host(0) 53 | assert pool_for_0 is pool_for_0_again 54 | assert pool_for_0 is not pool_for_1 55 | -------------------------------------------------------------------------------- /tests/test_redis_router.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from rc.redis_cluster import RedisCluster 4 | 5 | 6 | def test_redis_router_basics(): 7 | cluster = RedisCluster({ 8 | 0: {}, 9 | 1: {}, 10 | 2: {}, 11 | }) 12 | router = cluster.router 13 | 14 | assert router.get_key_for_command('GET', 'c') == 'c' 15 | assert router.get_key_for_command('SET', 'g') == 'g' 16 | with pytest.raises(RuntimeError): 17 | router.get_key_for_command('MGET', 
['a', 'b', 'c']) 18 | 19 | assert router.get_host_for_key('c') == 0 20 | assert router.get_host_for_key('g') == 1 21 | assert router.get_host_for_key('a') == 2 22 | 23 | assert router.get_host_for_command('GET', 'c') == 0 24 | assert router.get_host_for_command('GET', 'g') == 1 25 | assert router.get_host_for_command('SET', 'a') == 2 26 | -------------------------------------------------------------------------------- /tests/test_serializer.py: -------------------------------------------------------------------------------- 1 | from rc.serializer import PickleSerializer, JSONSerializer 2 | 3 | 4 | def test_pickle_serializer(): 5 | serializer = PickleSerializer() 6 | obj = 1 7 | string = serializer.dumps(obj) 8 | assert obj == serializer.loads(string) 9 | obj = 'test' 10 | string = serializer.dumps(obj) 11 | assert obj == serializer.loads(string) 12 | obj = {'key': 'value'} 13 | string = serializer.dumps(obj) 14 | assert obj == serializer.loads(string) 15 | assert serializer.loads(None) is None 16 | 17 | 18 | def test_json_serializer(): 19 | serializer = JSONSerializer() 20 | obj = 1 21 | string = serializer.dumps(obj) 22 | assert obj == serializer.loads(string) 23 | obj = 'test' 24 | string = serializer.dumps(obj) 25 | assert obj == serializer.loads(string) 26 | obj = {'key': 'value'} 27 | string = serializer.dumps(obj) 28 | assert obj == serializer.loads(string) 29 | assert serializer.loads(None) is None 30 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from rc.utils import generate_key_for_cached_func 2 | 3 | 4 | def test_generate_key(): 5 | def func(): 6 | pass 7 | cache_key = generate_key_for_cached_func(None, func, 'foo') 8 | assert cache_key == u'test_utils func foo' 9 | cache_key = generate_key_for_cached_func('prefix', func, 'foo') 10 | assert cache_key == u'prefix test_utils func foo' 11 | cache_key = 
generate_key_for_cached_func(None, func, 'foo', k='v') 12 | assert cache_key == u'test_utils func foo k=v' 13 | cache_key = generate_key_for_cached_func(None, func, 14 | 'foo', k='v', k2='v2') 15 | assert cache_key == u'test_utils func foo k=v k2=v2' 16 | 17 | def method(self): 18 | pass 19 | cache_key = generate_key_for_cached_func(None, method, 'foo') 20 | assert cache_key == u'test_utils method foo' 21 | cache_key = generate_key_for_cached_func(None, method, None, 'foo') 22 | assert cache_key == u'test_utils method None foo' 23 | 24 | def method(cls): 25 | pass 26 | cache_key = generate_key_for_cached_func(None, method, 'foo') 27 | assert cache_key == u'test_utils method foo' 28 | --------------------------------------------------------------------------------