├── .github └── workflows │ └── runtests.yml ├── .gitignore ├── CHANGELOG ├── LICENSE ├── MANIFEST.in ├── README.rst ├── beaker ├── __init__.py ├── _compat.py ├── cache.py ├── container.py ├── converters.py ├── cookie.py ├── crypto │ ├── __init__.py │ ├── jcecrypto.py │ ├── noencryption.py │ ├── nsscrypto.py │ ├── pbkdf2.py │ ├── pyca_cryptography.py │ ├── pycrypto.py │ └── util.py ├── docs │ ├── Makefile │ ├── caching.rst │ ├── changes.rst │ ├── conf.py │ ├── configuration.rst │ ├── glossary.rst │ ├── index.rst │ ├── modules │ │ ├── cache.rst │ │ ├── container.rst │ │ ├── database.rst │ │ ├── google.rst │ │ ├── memcached.rst │ │ ├── middleware.rst │ │ ├── mongodb.rst │ │ ├── pbkdf2.rst │ │ ├── redis.rst │ │ ├── rediscluster.rst │ │ ├── session.rst │ │ ├── sqla.rst │ │ ├── synchronization.rst │ │ └── util.rst │ └── sessions.rst ├── exceptions.py ├── ext │ ├── __init__.py │ ├── database.py │ ├── google.py │ ├── memcached.py │ ├── mongodb.py │ ├── redisclusternm.py │ ├── redisnm.py │ └── sqla.py ├── middleware.py ├── session.py ├── synchronization.py └── util.py ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── annotated_functions.py ├── test_cache.py ├── test_cache_decorator.py ├── test_cachemanager.py ├── test_container.py ├── test_converters.py ├── test_cookie_domain_only.py ├── test_cookie_expires.py ├── test_cookie_only.py ├── test_database.py ├── test_domain_setting.py ├── test_increment.py ├── test_managers ├── __init__.py ├── base.py ├── test_ext_mongodb.py ├── test_ext_redis.py └── test_ext_rediscluster.py ├── test_memcached.py ├── test_namespacing.py ├── test_namespacing_files ├── __init__.py ├── namespace_get.py └── namespace_go.py ├── test_pbkdf2.py ├── test_session.py ├── test_sqla.py ├── test_syncdict.py ├── test_synchronizer.py └── test_unicode_cache_keys.py /.github/workflows/runtests.yml: -------------------------------------------------------------------------------- 1 | name: Run Tests 2 | on: [push, pull_request] 3 | jobs: 4 | build: 5 | name: Run tests 6 | strategy: 7 | matrix: 8 | os: [ubuntu-latest] 9 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] 10 | runs-on: ${{ matrix.os }} 11 | steps: 12 | - uses: actions/checkout@v2 13 | - name: Install locales 14 | run: sudo apt-get install -y locales language-pack-it 15 | - name: Set up Python ${{ matrix.python-version }} 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: ${{ matrix.python-version }} 19 | - name: Install dependencies 20 | run: | 21 | python -m pip install --upgrade pip 22 | pip install -U --upgrade-strategy=eager --pre -e .[testsuite] 23 | - name: Start memcached 24 | uses: niden/actions-memcached@v7 25 | - name: Start Redis 26 | uses: supercharge/redis-github-action@1.4.0 27 | - name: Start MongoDB 28 | uses: supercharge/mongodb-github-action@1.8.0 29 | - uses: vishnudxb/redis-cluster@1.0.9 30 | with: 31 | master1-port: 5000 32 | master2-port: 5001 33 | master3-port: 5002 34 | slave1-port: 5003 35 | slave2-port: 5004 36 | slave3-port: 5005 37 | sleep-duration: 5 38 | - name: Test with pytest 39 | run: | 40 | pytest -vv 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg 2 | *.egg-info 3 | *.pyc 4 | *$py.class 5 | *.pt.py 6 | *.txt.py 7 | *~ 8 | .coverage 9 | .tox/ 10 | nosetests.xml 11 | build/ 12 | dist/ 13 | bin/ 14 | lib/ 15 | include/ 16 | .idea/ 17 | distribute-*.tar.gz 18 | bookenv/ 19 | jyenv/ 20 | pypyenv/ 21 | env*/ 22 | 
tests/test.db 23 | /.eggs/ 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2006, 2007 Ben Bangert, Mike Bayer, Philip Jenvey 2 | and contributors. 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions 7 | are met: 8 | 1. Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 2. Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | 3. The name of the author or contributors may not be used to endorse or 14 | promote products derived from this software without specific prior 15 | written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 18 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 20 | ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 21 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 22 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 23 | OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 24 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 25 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 26 | OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 27 | SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include tests *.py 2 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ========================= 2 | Cache and Session Library 3 | ========================= 4 | 5 | About 6 | ===== 7 | 8 | Beaker is a web session and general caching library that includes WSGI 9 | middleware for use in web applications. 10 | 11 | As a general caching library, Beaker can handle storing for various times 12 | any Python object that can be pickled with optional back-ends on a 13 | fine-grained basis. 14 | 15 | Beaker was built largely on the code from MyghtyUtils, then refactored and 16 | extended with database support. 17 | 18 | Beaker includes Cache and Session WSGI middleware to ease integration with 19 | WSGI capable frameworks, and is automatically used by `Pylons 20 | `_ and 21 | `TurboGears `_. 
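A minimal sketch of the middleware in use (the ``session.*`` keys below are
standard Beaker configuration options; the paths are only placeholders)::

    from beaker.middleware import SessionMiddleware

    def simple_app(environ, start_response):
        # The session is loaded lazily on first access.
        session = environ['beaker.session']
        session['visited'] = True
        session.save()
        start_response('200 OK', [('Content-type', 'text/plain')])
        return [b'Hello world!']

    app = SessionMiddleware(simple_app, {
        'session.type': 'file',
        'session.data_dir': '/tmp/beaker_sessions',
    })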
22 | 23 | 24 | Features 25 | ======== 26 | 27 | * Fast, robust performance 28 | * Multiple reader/single writer lock system to avoid duplicate simultaneous 29 | cache creation 30 | * Cache back-ends include dbm, file, memory, memcached, Redis, MongoDB, and 31 | database (Using SQLAlchemy for multiple-db vendor support) 32 | * Signed cookies to prevent session hijacking/spoofing 33 | * Cookie-only sessions to remove the need for a db or file backend (ideal 34 | for clustered systems) 35 | * Extensible Container object to support new back-ends 36 | * Caches can be divided into namespaces (to represent templates, objects, 37 | etc.) then keyed for different copies 38 | * Create functions for automatic call-backs to create new cache copies after 39 | expiration 40 | * Fine-grained toggling of back-ends, keys, and expiration per Cache object 41 | 42 | 43 | Documentation 44 | ============= 45 | 46 | Documentation can be found on the `Official Beaker Docs site 47 | `_. 48 | 49 | 50 | Source 51 | ====== 52 | 53 | The latest developer version is available in a `GitHub repository 54 | `_. 55 | 56 | Contributing 57 | ============ 58 | 59 | Bugs can be filed on GitHub, **should be accompanied by a test case** to 60 | retain current code coverage, and should be in a pull request when ready to be 61 | accepted into the beaker code-base. 62 | -------------------------------------------------------------------------------- /beaker/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '1.13.0' 2 | -------------------------------------------------------------------------------- /beaker/_compat.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import sys 3 | 4 | # True if we are running on Python 2. 
5 | PY2 = sys.version_info[0] == 2 6 | PYVER = sys.version_info[:2] 7 | JYTHON = sys.platform.startswith('java') 8 | 9 | if PY2 and not JYTHON: # pragma: no cover 10 | import cPickle as pickle 11 | else: # pragma: no cover 12 | import pickle 13 | 14 | 15 | if not PY2: # pragma: no cover 16 | xrange_ = range 17 | NoneType = type(None) 18 | 19 | string_type = str 20 | unicode_text = str 21 | byte_string = bytes 22 | 23 | from urllib.parse import urlencode as url_encode 24 | from urllib.parse import quote as url_quote 25 | from urllib.parse import unquote as url_unquote 26 | from urllib.parse import urlparse as url_parse 27 | from urllib.request import url2pathname 28 | import http.cookies as http_cookies 29 | from base64 import b64decode as _b64decode, b64encode as _b64encode 30 | 31 | try: 32 | import dbm.gnu as anydbm 33 | except ImportError: 34 | import dbm.dumb as anydbm 35 | 36 | def b64decode(b): 37 | return _b64decode(b.encode('ascii')) 38 | 39 | def b64encode(s): 40 | return _b64encode(s).decode('ascii') 41 | 42 | def u_(s): 43 | return str(s) 44 | 45 | def bytes_(s): 46 | if isinstance(s, byte_string): 47 | return s 48 | return str(s).encode('ascii', 'strict') 49 | 50 | def dictkeyslist(d): 51 | return list(d.keys()) 52 | 53 | else: 54 | xrange_ = xrange 55 | from types import NoneType 56 | 57 | string_type = basestring 58 | unicode_text = unicode 59 | byte_string = str 60 | 61 | from urllib import urlencode as url_encode 62 | from urllib import quote as url_quote 63 | from urllib import unquote as url_unquote 64 | from urlparse import urlparse as url_parse 65 | from urllib import url2pathname 66 | import Cookie as http_cookies 67 | from base64 import b64decode, b64encode 68 | import anydbm 69 | 70 | def u_(s): 71 | if isinstance(s, unicode_text): 72 | return s 73 | 74 | if not isinstance(s, byte_string): 75 | s = str(s) 76 | return unicode(s, 'utf-8') 77 | 78 | def bytes_(s): 79 | if isinstance(s, byte_string): 80 | return s 81 | return str(s) 82 | 83 | def dictkeyslist(d): 84 | return d.keys() 85 | 86 | 87 | def im_func(f): 88 | if not PY2: # pragma: no cover 89 | return getattr(f, '__func__', None) 90 | else: 91 | return getattr(f, 'im_func', None) 92 | 93 | 94 | def default_im_func(f): 95 | if not PY2: # pragma: no cover 96 | return getattr(f, '__func__', f) 97 | else: 98 | return getattr(f, 'im_func', f) 99 | 100 | 101 | def im_self(f): 102 | if not PY2: # pragma: no cover 103 | return getattr(f, '__self__', None) 104 | else: 105 | return getattr(f, 'im_self', None) 106 | 107 | 108 | def im_class(f): 109 | if not PY2: # pragma: no cover 110 | self = im_self(f) 111 | if self is not None: 112 | return self.__class__ 113 | else: 114 | return None 115 | else: 116 | return getattr(f, 'im_class', None) 117 | 118 | 119 | def add_metaclass(metaclass): 120 | """Class decorator for creating a class with a metaclass.""" 121 | def wrapper(cls): 122 | orig_vars = cls.__dict__.copy() 123 | slots = orig_vars.get('__slots__') 124 | if slots is not None: 125 | if isinstance(slots, str): 126 | slots = [slots] 127 | for slots_var in slots: 128 | orig_vars.pop(slots_var) 129 | orig_vars.pop('__dict__', None) 130 | orig_vars.pop('__weakref__', None) 131 | return metaclass(cls.__name__, cls.__bases__, orig_vars) 132 | return wrapper 133 | 134 | 135 | if not PY2: # pragma: no cover 136 | import builtins 137 | exec_ = getattr(builtins, "exec") 138 | 139 | def reraise(tp, value, tb=None): 140 | if value.__traceback__ is not tb: 141 | raise value.with_traceback(tb) 142 | raise value 143 | else: # 
pragma: no cover 144 | def exec_(code, globs=None, locs=None): 145 | """Execute code in a namespace.""" 146 | if globs is None: 147 | frame = sys._getframe(1) 148 | globs = frame.f_globals 149 | if locs is None: 150 | locs = frame.f_locals 151 | del frame 152 | elif locs is None: 153 | locs = globs 154 | exec("""exec code in globs, locs""") 155 | 156 | exec_("""def reraise(tp, value, tb=None): 157 | raise tp, value, tb 158 | """) 159 | 160 | 161 | try: 162 | from inspect import signature as func_signature 163 | except ImportError: 164 | from funcsigs import signature as func_signature 165 | 166 | 167 | def bindfuncargs(arginfo, args, kwargs): 168 | boundargs = arginfo.bind(*args, **kwargs) 169 | return boundargs.args, boundargs.kwargs 170 | -------------------------------------------------------------------------------- /beaker/converters.py: -------------------------------------------------------------------------------- 1 | from beaker._compat import string_type 2 | 3 | # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org) 4 | # Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php 5 | def asbool(obj): 6 | if isinstance(obj, string_type): 7 | obj = obj.strip().lower() 8 | if obj in ['true', 'yes', 'on', 'y', 't', '1']: 9 | return True 10 | elif obj in ['false', 'no', 'off', 'n', 'f', '0']: 11 | return False 12 | else: 13 | raise ValueError( 14 | "String is not true/false: %r" % obj) 15 | return bool(obj) 16 | 17 | 18 | def aslist(obj, sep=None, strip=True): 19 | if isinstance(obj, string_type): 20 | lst = obj.split(sep) 21 | if strip: 22 | lst = [v.strip() for v in lst] 23 | return lst 24 | elif isinstance(obj, (list, tuple)): 25 | return obj 26 | elif obj is None: 27 | return [] 28 | else: 29 | return [obj] 30 | -------------------------------------------------------------------------------- /beaker/cookie.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from ._compat import http_cookies 3 | 4 | # Some versions of Python 2.7 and later won't need this encoding bug fix: 5 | _cookie_encodes_correctly = http_cookies.SimpleCookie().value_encode(';') == (';', '"\\073"') 6 | 7 | # Cookie pickling bug is fixed in Python 2.7.9 and Python 3.4.3+ 8 | # http://bugs.python.org/issue22775 9 | cookie_pickles_properly = ( 10 | (sys.version_info[:2] == (2, 7) and sys.version_info >= (2, 7, 9)) or 11 | sys.version_info >= (3, 4, 3) 12 | ) 13 | 14 | # Add support for the SameSite attribute (obsolete when PY37 is unsupported). 15 | http_cookies.Morsel._reserved.setdefault('samesite', 'SameSite') 16 | 17 | 18 | # Adapted from Django.http.cookies and always enabled the bad_cookies 19 | # behaviour to cope with any invalid cookie key while keeping around 20 | # the session. 21 | class SimpleCookie(http_cookies.SimpleCookie): 22 | if not cookie_pickles_properly: 23 | def __setitem__(self, key, value): 24 | # Apply the fix from http://bugs.python.org/issue22775 where 25 | # it's not fixed in Python itself 26 | if isinstance(value, http_cookies.Morsel): 27 | # allow assignment of constructed Morsels (e.g. for pickling) 28 | dict.__setitem__(self, key, value) 29 | else: 30 | super(SimpleCookie, self).__setitem__(key, value) 31 | 32 | if not _cookie_encodes_correctly: 33 | def value_encode(self, val): 34 | # Some browsers do not support quoted-string from RFC 2109, 35 | # including some versions of Safari and Internet Explorer. 
36 | # These browsers split on ';', and some versions of Safari 37 | # are known to split on ', '. Therefore, we encode ';' and ',' 38 | 39 | # SimpleCookie already does the hard work of encoding and decoding. 40 | # It uses octal sequences like '\\012' for newline etc. 41 | # and non-ASCII chars. We just make use of this mechanism, to 42 | # avoid introducing two encoding schemes which would be confusing 43 | # and especially awkward for javascript. 44 | 45 | # NB, contrary to Python docs, value_encode returns a tuple containing 46 | # (real val, encoded_val) 47 | val, encoded = super(SimpleCookie, self).value_encode(val) 48 | 49 | encoded = encoded.replace(";", "\\073").replace(",", "\\054") 50 | # If encoded now contains any quoted chars, we need double quotes 51 | # around the whole string. 52 | if "\\" in encoded and not encoded.startswith('"'): 53 | encoded = '"' + encoded + '"' 54 | 55 | return val, encoded 56 | 57 | def load(self, rawdata): 58 | self.bad_cookies = set() 59 | super(SimpleCookie, self).load(rawdata) 60 | for key in self.bad_cookies: 61 | del self[key] 62 | 63 | # override private __set() method: 64 | # (needed for using our Morsel, and for laxness with CookieError 65 | def _BaseCookie__set(self, key, real_value, coded_value): 66 | try: 67 | super(SimpleCookie, self)._BaseCookie__set(key, real_value, coded_value) 68 | except http_cookies.CookieError: 69 | if not hasattr(self, 'bad_cookies'): 70 | self.bad_cookies = set() 71 | self.bad_cookies.add(key) 72 | dict.__setitem__(self, key, http_cookies.Morsel()) 73 | -------------------------------------------------------------------------------- /beaker/crypto/__init__.py: -------------------------------------------------------------------------------- 1 | """Provide a crypto object, depending on the available modules. 2 | 3 | The object has this interface: 4 | 5 | aesEncrypt(DATA, KEY) 6 | Encrypt the DATA with key KEY. 7 | 8 | aesDecrypt(DATA, KEY): 9 | Decrypt the DATA with key KEY. 10 | 11 | has_aes 12 | True if the encryption provides AES encryption. 13 | 14 | getKeyLength() 15 | Return the maximum size for keys for this crypto object, in bytes. 16 | 17 | """ 18 | 19 | from .._compat import JYTHON 20 | 21 | 22 | from beaker.crypto.pbkdf2 import pbkdf2 23 | from beaker.crypto.util import hmac, sha1, hmac_sha1, md5 24 | from beaker import util 25 | from beaker.exceptions import InvalidCryptoBackendError 26 | 27 | keyLength = None 28 | DEFAULT_NONCE_BITS = 128 29 | 30 | CRYPTO_MODULES = {} 31 | 32 | 33 | def load_default_module(): 34 | """Load the default crypto module and return it. 35 | 36 | Note: if no crypto module is available, return a dummy module 37 | which does not encrypt at all. 38 | 39 | """ 40 | if JYTHON: 41 | try: 42 | from beaker.crypto import jcecrypto 43 | return jcecrypto 44 | except ImportError: 45 | pass 46 | else: 47 | try: 48 | from beaker.crypto import nsscrypto 49 | return nsscrypto 50 | except ImportError: 51 | try: 52 | from beaker.crypto import pycrypto 53 | return pycrypto 54 | except ImportError: 55 | pass 56 | from beaker.crypto import noencryption 57 | return noencryption 58 | 59 | 60 | def register_crypto_module(name, mod): 61 | """ 62 | Register the given module under the name given. 
63 | """ 64 | CRYPTO_MODULES[name] = mod 65 | 66 | 67 | def get_crypto_module(name): 68 | """ 69 | Get the active crypto module for this name 70 | """ 71 | if name not in CRYPTO_MODULES: 72 | if name == 'default': 73 | register_crypto_module('default', load_default_module()) 74 | elif name == 'nss': 75 | from beaker.crypto import nsscrypto 76 | register_crypto_module(name, nsscrypto) 77 | elif name == 'pycrypto': 78 | from beaker.crypto import pycrypto 79 | register_crypto_module(name, pycrypto) 80 | elif name == 'cryptography': 81 | from beaker.crypto import pyca_cryptography 82 | register_crypto_module(name, pyca_cryptography) 83 | else: 84 | raise InvalidCryptoBackendError( 85 | "No crypto backend with name '%s' is registered." % name) 86 | 87 | return CRYPTO_MODULES[name] 88 | 89 | 90 | 91 | def generateCryptoKeys(master_key, salt, iterations, keylen): 92 | # NB: We XOR parts of the keystream into the randomly-generated parts, just 93 | # in case os.urandom() isn't as random as it should be. Note that if 94 | # os.urandom() returns truly random data, this will have no effect on the 95 | # overall security. 96 | return pbkdf2(master_key, salt, iterations=iterations, dklen=keylen) 97 | 98 | 99 | def get_nonce_size(number_of_bits): 100 | if number_of_bits % 8: 101 | raise ValueError('Nonce complexity currently supports multiples of 8') 102 | 103 | bytes = number_of_bits // 8 104 | b64bytes = ((4 * bytes // 3) + 3) & ~3 105 | return bytes, b64bytes 106 | -------------------------------------------------------------------------------- /beaker/crypto/jcecrypto.py: -------------------------------------------------------------------------------- 1 | """ 2 | Encryption module that uses the Java Cryptography Extensions (JCE). 3 | 4 | Note that in default installations of the Java Runtime Environment, the 5 | maximum key length is limited to 128 bits due to US export 6 | restrictions. This makes the generated keys incompatible with the ones 7 | generated by pycryptopp, which has no such restrictions. To fix this, 8 | download the "Unlimited Strength Jurisdiction Policy Files" from Sun, 9 | which will allow encryption using 256 bit AES keys. 10 | """ 11 | from warnings import warn 12 | 13 | from javax.crypto import Cipher 14 | from javax.crypto.spec import SecretKeySpec, IvParameterSpec 15 | 16 | import jarray 17 | 18 | # Initialization vector filled with zeros 19 | _iv = IvParameterSpec(jarray.zeros(16, 'b')) 20 | 21 | 22 | def aesEncrypt(data, key): 23 | cipher = Cipher.getInstance('AES/CTR/NoPadding') 24 | skeySpec = SecretKeySpec(key, 'AES') 25 | cipher.init(Cipher.ENCRYPT_MODE, skeySpec, _iv) 26 | return cipher.doFinal(data).tostring() 27 | 28 | # magic. 29 | aesDecrypt = aesEncrypt 30 | 31 | has_aes = True 32 | 33 | def getKeyLength(): 34 | maxlen = Cipher.getMaxAllowedKeyLength('AES/CTR/NoPadding') 35 | return min(maxlen, 256) / 8 36 | 37 | 38 | if getKeyLength() < 32: 39 | warn('Crypto implementation only supports key lengths up to %d bits. 
' 40 | 'Generated session cookies may be incompatible with other ' 41 | 'environments' % (getKeyLength() * 8)) 42 | -------------------------------------------------------------------------------- /beaker/crypto/noencryption.py: -------------------------------------------------------------------------------- 1 | """Encryption module that does nothing""" 2 | 3 | def aesEncrypt(data, key): 4 | return data 5 | 6 | def aesDecrypt(data, key): 7 | return data 8 | 9 | has_aes = False 10 | 11 | def getKeyLength(): 12 | return 32 13 | -------------------------------------------------------------------------------- /beaker/crypto/nsscrypto.py: -------------------------------------------------------------------------------- 1 | """Encryption module that uses nsscrypto""" 2 | import nss.nss 3 | 4 | nss.nss.nss_init_nodb() 5 | 6 | # Apparently the rest of beaker doesn't care about the particular cipher, 7 | # mode and padding used. 8 | # NOTE: A constant IV!!! This is only secure if the KEY is never reused!!! 9 | _mech = nss.nss.CKM_AES_CBC_PAD 10 | _iv = '\0' * nss.nss.get_iv_length(_mech) 11 | 12 | def aesEncrypt(data, key): 13 | slot = nss.nss.get_best_slot(_mech) 14 | 15 | key_obj = nss.nss.import_sym_key(slot, _mech, nss.nss.PK11_OriginGenerated, 16 | nss.nss.CKA_ENCRYPT, nss.nss.SecItem(key)) 17 | 18 | param = nss.nss.param_from_iv(_mech, nss.nss.SecItem(_iv)) 19 | ctx = nss.nss.create_context_by_sym_key(_mech, nss.nss.CKA_ENCRYPT, key_obj, 20 | param) 21 | l1 = ctx.cipher_op(data) 22 | # Yes, DIGEST. This needs fixing in NSS, but apparently nobody (including 23 | # me :( ) cares enough. 24 | l2 = ctx.digest_final() 25 | 26 | return l1 + l2 27 | 28 | def aesDecrypt(data, key): 29 | slot = nss.nss.get_best_slot(_mech) 30 | 31 | key_obj = nss.nss.import_sym_key(slot, _mech, nss.nss.PK11_OriginGenerated, 32 | nss.nss.CKA_DECRYPT, nss.nss.SecItem(key)) 33 | 34 | param = nss.nss.param_from_iv(_mech, nss.nss.SecItem(_iv)) 35 | ctx = nss.nss.create_context_by_sym_key(_mech, nss.nss.CKA_DECRYPT, key_obj, 36 | param) 37 | l1 = ctx.cipher_op(data) 38 | # Yes, DIGEST. This needs fixing in NSS, but apparently nobody (including 39 | # me :( ) cares enough. 40 | l2 = ctx.digest_final() 41 | 42 | return l1 + l2 43 | 44 | has_aes = True 45 | 46 | def getKeyLength(): 47 | return 32 48 | -------------------------------------------------------------------------------- /beaker/crypto/pbkdf2.py: -------------------------------------------------------------------------------- 1 | """ 2 | PBKDF2 Implementation adapted from django.utils.crypto. 3 | 4 | This is used to generate the encryption key for enciphered sessions. 5 | """ 6 | from beaker._compat import bytes_, xrange_ 7 | 8 | import hmac 9 | import struct 10 | import hashlib 11 | import binascii 12 | 13 | 14 | def _bin_to_long(x): 15 | """Convert a binary string into a long integer""" 16 | return int(binascii.hexlify(x), 16) 17 | 18 | 19 | def _long_to_bin(x, hex_format_string): 20 | """ 21 | Convert a long integer into a binary string. 22 | hex_format_string is like "%020x" for padding 10 characters. 23 | """ 24 | return binascii.unhexlify((hex_format_string % x).encode('ascii')) 25 | 26 | 27 | if hasattr(hashlib, "pbkdf2_hmac"): 28 | def pbkdf2(password, salt, iterations, dklen=0, digest=None): 29 | """ 30 | Implements PBKDF2 using the stdlib. This is used in Python 2.7.8+ and 3.4+. 31 | 32 | HMAC+SHA256 is used as the default pseudo random function. 
33 | 34 | As of 2014, 100,000 iterations was the recommended default which took 35 | 100ms on a 2.7Ghz Intel i7 with an optimized implementation. This is 36 | probably the bare minimum for security given 1000 iterations was 37 | recommended in 2001. 38 | """ 39 | if digest is None: 40 | digest = hashlib.sha1 41 | if not dklen: 42 | dklen = None 43 | password = bytes_(password) 44 | salt = bytes_(salt) 45 | return hashlib.pbkdf2_hmac( 46 | digest().name, password, salt, iterations, dklen) 47 | else: 48 | def pbkdf2(password, salt, iterations, dklen=0, digest=None): 49 | """ 50 | Implements PBKDF2 as defined in RFC 2898, section 5.2 51 | 52 | HMAC+SHA256 is used as the default pseudo random function. 53 | 54 | As of 2014, 100,000 iterations was the recommended default which took 55 | 100ms on a 2.7Ghz Intel i7 with an optimized implementation. This is 56 | probably the bare minimum for security given 1000 iterations was 57 | recommended in 2001. This code is very well optimized for CPython and 58 | is about five times slower than OpenSSL's implementation. 59 | """ 60 | assert iterations > 0 61 | if not digest: 62 | digest = hashlib.sha1 63 | password = bytes_(password) 64 | salt = bytes_(salt) 65 | hlen = digest().digest_size 66 | if not dklen: 67 | dklen = hlen 68 | if dklen > (2 ** 32 - 1) * hlen: 69 | raise OverflowError('dklen too big') 70 | l = -(-dklen // hlen) 71 | r = dklen - (l - 1) * hlen 72 | 73 | hex_format_string = "%%0%ix" % (hlen * 2) 74 | 75 | inner, outer = digest(), digest() 76 | if len(password) > inner.block_size: 77 | password = digest(password).digest() 78 | password += b'\x00' * (inner.block_size - len(password)) 79 | inner.update(password.translate(hmac.trans_36)) 80 | outer.update(password.translate(hmac.trans_5C)) 81 | 82 | def F(i): 83 | u = salt + struct.pack(b'>I', i) 84 | result = 0 85 | for j in xrange_(int(iterations)): 86 | dig1, dig2 = inner.copy(), outer.copy() 87 | dig1.update(u) 88 | dig2.update(dig1.digest()) 89 | u = dig2.digest() 90 | result ^= _bin_to_long(u) 91 | return _long_to_bin(result, hex_format_string) 92 | 93 | T = [F(x) for x in xrange_(1, l)] 94 | return b''.join(T) + F(l)[:r] 95 | -------------------------------------------------------------------------------- /beaker/crypto/pyca_cryptography.py: -------------------------------------------------------------------------------- 1 | """Encryption module that uses pyca/cryptography""" 2 | 3 | import os 4 | import json 5 | 6 | from cryptography.hazmat.backends import default_backend 7 | from cryptography.hazmat.primitives.ciphers import ( 8 | Cipher, algorithms, modes 9 | ) 10 | 11 | 12 | def aesEncrypt(data, key): 13 | # Generate a random 96-bit IV. 14 | iv = os.urandom(12) 15 | 16 | # Construct an AES-GCM Cipher object with the given key and a 17 | # randomly generated IV. 18 | encryptor = Cipher( 19 | algorithms.AES(key), 20 | modes.GCM(iv), 21 | backend=default_backend() 22 | ).encryptor() 23 | 24 | # Encrypt the plaintext and get the associated ciphertext. 25 | # GCM does not require padding. 26 | ciphertext = encryptor.update(data) + encryptor.finalize() 27 | 28 | return iv + encryptor.tag + ciphertext 29 | 30 | 31 | def aesDecrypt(data, key): 32 | iv = data[:12] 33 | tag = data[12:28] 34 | ciphertext = data[28:] 35 | 36 | # Construct a Cipher object, with the key, iv, and additionally the 37 | # GCM tag used for authenticating the message. 
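    # Note: the iv/tag/ciphertext slices above mirror the layout produced by
    # aesEncrypt: a 12-byte IV, then the 16-byte GCM tag, then the ciphertext.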
38 | decryptor = Cipher( 39 | algorithms.AES(key), 40 | modes.GCM(iv, tag), 41 | backend=default_backend() 42 | ).decryptor() 43 | 44 | # Decryption gets us the authenticated plaintext. 45 | # If the tag does not match an InvalidTag exception will be raised. 46 | return decryptor.update(ciphertext) + decryptor.finalize() 47 | 48 | 49 | has_aes = True 50 | 51 | def getKeyLength(): 52 | return 32 53 | -------------------------------------------------------------------------------- /beaker/crypto/pycrypto.py: -------------------------------------------------------------------------------- 1 | """Encryption module that uses pycryptopp or pycrypto""" 2 | try: 3 | # Pycryptopp is preferred over Crypto because Crypto has had 4 | # various periods of not being maintained, and pycryptopp uses 5 | # the Crypto++ library which is generally considered the 'gold standard' 6 | # of crypto implementations 7 | from pycryptopp.cipher import aes 8 | 9 | def aesEncrypt(data, key): 10 | cipher = aes.AES(key) 11 | return cipher.process(data) 12 | 13 | # magic. 14 | aesDecrypt = aesEncrypt 15 | 16 | except ImportError: 17 | from Crypto.Cipher import AES 18 | from Crypto.Util import Counter 19 | 20 | def aesEncrypt(data, key): 21 | cipher = AES.new(key, AES.MODE_CTR, 22 | counter=Counter.new(128, initial_value=0)) 23 | 24 | return cipher.encrypt(data) 25 | 26 | def aesDecrypt(data, key): 27 | cipher = AES.new(key, AES.MODE_CTR, 28 | counter=Counter.new(128, initial_value=0)) 29 | return cipher.decrypt(data) 30 | 31 | has_aes = True 32 | 33 | def getKeyLength(): 34 | return 32 35 | -------------------------------------------------------------------------------- /beaker/crypto/util.py: -------------------------------------------------------------------------------- 1 | from hashlib import md5 2 | 3 | try: 4 | # Use PyCrypto (if available) 5 | from Crypto.Hash import HMAC as hmac, SHA as hmac_sha1 6 | sha1 = hmac_sha1.new 7 | 8 | except ImportError: 9 | 10 | # PyCrypto not available. Use the Python standard library. 11 | import hmac 12 | 13 | # NOTE: We have to use the callable with hashlib (hashlib.sha1), 14 | # otherwise hmac only accepts the sha module object itself 15 | from hashlib import sha1 16 | hmac_sha1 = sha1 -------------------------------------------------------------------------------- /beaker/docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | 9 | # Internal variables. 10 | PAPEROPT_a4 = -D latex_paper_size=a4 11 | PAPEROPT_letter = -D latex_paper_size=letter 12 | ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 13 | 14 | .PHONY: help clean html web pickle htmlhelp latex changes linkcheck 15 | 16 | help: 17 | @echo "Please use \`make ' where is one of" 18 | @echo " html to make standalone HTML files" 19 | @echo " pickle to make pickle files" 20 | @echo " json to make JSON files" 21 | @echo " htmlhelp to make HTML files and a HTML help project" 22 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 23 | @echo " changes to make an overview over all changed/added/deprecated items" 24 | @echo " linkcheck to check all external links for integrity" 25 | 26 | clean: 27 | -rm -rf build/* 28 | 29 | html: 30 | mkdir -p build/html build/doctrees 31 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html 32 | @echo 33 | @echo "Build finished. 
The HTML pages are in build/html." 34 | 35 | pickle: 36 | mkdir -p build/pickle build/doctrees 37 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle 38 | @echo 39 | @echo "Build finished; now you can process the pickle files." 40 | 41 | web: pickle 42 | 43 | json: 44 | mkdir -p build/json build/doctrees 45 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json 46 | @echo 47 | @echo "Build finished; now you can process the JSON files." 48 | 49 | htmlhelp: 50 | mkdir -p build/htmlhelp build/doctrees 51 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp 52 | @echo 53 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 54 | ".hhp project file in build/htmlhelp." 55 | 56 | latex: 57 | mkdir -p build/latex build/doctrees 58 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex 59 | @echo 60 | @echo "Build finished; the LaTeX files are in build/latex." 61 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 62 | "run these through (pdf)latex." 63 | 64 | changes: 65 | mkdir -p build/changes build/doctrees 66 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes 67 | @echo 68 | @echo "The overview file is in build/changes." 69 | 70 | linkcheck: 71 | mkdir -p build/linkcheck build/doctrees 72 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck 73 | @echo 74 | @echo "Link check complete; look for any errors in the above output " \ 75 | "or in build/linkcheck/output.txt." 76 | -------------------------------------------------------------------------------- /beaker/docs/caching.rst: -------------------------------------------------------------------------------- 1 | .. _caching: 2 | 3 | ======= 4 | Caching 5 | ======= 6 | 7 | About 8 | ===== 9 | 10 | Beaker's caching system was originally based off the Perl Cache::Cache module, 11 | which was ported for use in `Myghty`_. Beaker was then extracted from this 12 | code, and has been substantially rewritten and modernized. 13 | 14 | Several concepts still exist from this origin though. Beaker's caching (and 15 | its sessions, behind the scenes) utilize the concept of 16 | :term:`NamespaceManager`, and :term:`Container` objects to handle storing 17 | cached data. 18 | 19 | Each back-end utilizes a customized version of each of these objects to handle 20 | storing data appropriately depending on the type of the back-end. 21 | 22 | The :class:`~beaker.cache.CacheManager` is responsible for getting the 23 | appropriate NamespaceManager, which then stores the cached values. Each 24 | namespace corresponds to a single ``thing`` that should be cached. Usually 25 | a single ``thing`` to be cached might vary slightly depending on parameters, 26 | for example a template might need several different copies of itself stored 27 | depending on whether a user is logged in or not. Each one of these copies 28 | is then ``keyed`` under the NamespaceManager and stored in a Container. 29 | 30 | There are three schemes for using Beaker's caching, the first and more 31 | traditional style is the programmatic API. This exposes the namespace's 32 | and retrieves a :class:`~beaker.cache.Cache` object that handles storing 33 | keyed values in a NamespaceManager with Container objects. 34 | 35 | The more elegant system, introduced in Beaker 1.3, is to use the 36 | :ref:`cache decorators `, these also support the 37 | use of :term:`Cache Regions`. 
38 | 39 | Introduced in Beaker 1.5 is a more flexible :func:`~beaker.cache.cache_region` 40 | decorator capable of decorating functions for use with Beaker's 41 | :ref:`caching_with_regions` **before** Beaker has been configured. This makes 42 | it possible to easily use Beaker's region caching decorator on functions in 43 | the module level. 44 | 45 | 46 | Creating the CacheManager Instance 47 | ================================== 48 | 49 | Before using Beaker's caching, an instance of the 50 | :class:`~beaker.cache.CacheManager` class should be created. All of the 51 | examples below assume that it has already been created. 52 | 53 | Creating the cache instance:: 54 | 55 | from beaker.cache import CacheManager 56 | from beaker.util import parse_cache_config_options 57 | 58 | cache_opts = { 59 | 'cache.type': 'file', 60 | 'cache.data_dir': '/tmp/cache/data', 61 | 'cache.lock_dir': '/tmp/cache/lock' 62 | } 63 | 64 | cache = CacheManager(**parse_cache_config_options(cache_opts)) 65 | 66 | Additional configuration options are documented in the :ref:`Configuration` 67 | section of the Beaker docs. 68 | 69 | 70 | Programmatic API 71 | ================ 72 | 73 | .. _programmatic: 74 | 75 | To store data for a cache value, first, a NamespaceManager has to be 76 | retrieved to manage the keys for a ``thing`` to be cached:: 77 | 78 | # Assuming that cache is an already created CacheManager instance 79 | tmpl_cache = cache.get_cache('mytemplate.html', type='dbm', expire=3600) 80 | 81 | .. note:: 82 | In addition to the defaults supplied to the 83 | :class:`~beaker.cache.CacheManager` instance, any of the Cache options 84 | can be changed on a per-namespace basis, as this example demonstrates 85 | by setting a ``type``, and ``expire`` option. 86 | 87 | Individual values should be stored using a creation function, which will 88 | be called anytime the cache has expired or a new copy needs to be made. The 89 | creation function must not accept any arguments as it won't be called with 90 | any. Options affecting the created value can be passed in by using closure 91 | scope on the creation function:: 92 | 93 | search_param = 'gophers' 94 | 95 | def get_results(): 96 | # do something to retrieve data 97 | data = get_data(search_param) 98 | return data 99 | 100 | # Cache this function, based on the search_param, using the tmpl_cache 101 | # instance from the prior example 102 | results = tmpl_cache.get(key=search_param, createfunc=get_results) 103 | 104 | Invalidating 105 | ------------ 106 | 107 | All of the values for a particular namespace can be removed by calling the 108 | :meth:`~beaker.cache.Cache.clear` method:: 109 | 110 | tmpl_cache.clear() 111 | 112 | Note that this only clears the key's in the namespace that this particular 113 | Cache instance is aware of. Therefore, it is recommended to manually clear out 114 | specific keys in a cache namespace that should be removed:: 115 | 116 | tmpl_cache.remove_value(key=search_param) 117 | 118 | 119 | Decorator API 120 | ============= 121 | 122 | .. _decorator_api: 123 | 124 | When using the decorator API, a namespace does not need to be specified and 125 | will instead be created for you with the name of the module + the name of the 126 | function that will have its output cached. 
127 | 128 | Since it's possible that multiple functions in the same module might have the 129 | same name, additional arguments can be provided to the decorators that will be 130 | used in the namespace to prevent multiple functions from caching their values 131 | in the same location. 132 | 133 | For example:: 134 | 135 | # Assuming that cache is an already created CacheManager instance 136 | @cache.cache('my_search_func', expire=3600) 137 | def get_results(search_param): 138 | # do something to retrieve data 139 | data = get_data(search_param) 140 | return data 141 | 142 | results = get_results('gophers') 143 | 144 | The non-keyword arguments to the :meth:`~beaker.cache.CacheManager.cache` 145 | method are the additional ones used to ensure this function's cache results 146 | won't clash with another function in this module called ``get_results``. 147 | 148 | The cache expire argument is specified as a keyword argument. Other valid 149 | arguments to the :meth:`~beaker.cache.CacheManager.get_cache` method such 150 | as ``type`` can also be passed in. 151 | 152 | When using the decorator, the function to cache can have arguments, which will 153 | be used as the key was in the :ref:`Programmatic API ` for 154 | the data generated. 155 | 156 | .. warning:: 157 | These arguments can **not** be keyword arguments. 158 | 159 | Invalidating 160 | ------------ 161 | 162 | Since the :meth:`~beaker.cache.CacheManager.cache` decorator hides the 163 | namespace used, manually removing the key requires the use of the 164 | :meth:`~beaker.cache.CacheManager.invalidate` function. To invalidate 165 | the 'gophers' result that the prior example referred to:: 166 | 167 | cache.invalidate(get_results, 'my_search_func', 'gophers') 168 | 169 | If however, a type was specified for the cached function, the type must 170 | also be given to the :meth:`~beaker.cache.CacheManager.invalidate` 171 | function so that it can remove the value from the appropriate back-end. 172 | 173 | Example:: 174 | 175 | # Assuming that cache is an already created CacheManager instance 176 | @cache.cache('my_search_func', type="file", expire=3600) 177 | def get_results(search_param): 178 | # do something to retrieve data 179 | data = get_data(search_param) 180 | return data 181 | 182 | cache.invalidate(get_results, 'my_search_func', 'gophers', type="file") 183 | 184 | .. note:: 185 | Both the arguments used to specify the additional namespace info to the 186 | cache decorator **and** the arguments sent to the function need to be 187 | given to the :meth:`~beaker.cache.CacheManager.region_invalidate` 188 | function so that it can properly locate the namespace and cache key 189 | to remove. 190 | 191 | 192 | .. _caching_with_regions: 193 | 194 | Cache Regions 195 | ============= 196 | 197 | Rather than having to specify the expiration, or toggle the type used for 198 | caching different functions, commonly used cache parameters can be defined 199 | as :term:`Cache Regions`. These user-defined regions may be used 200 | with the :meth:`~beaker.cache.CacheManager.region` decorator rather than 201 | passing the configuration. 202 | 203 | This can be useful if there are a few common cache schemes used by an 204 | application that should be setup in a single place then used as appropriate 205 | throughout the application. 206 | 207 | Setting up cache regions is documented in the 208 | :ref:`cache region options ` section in 209 | :ref:`configuration`. 
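For orientation, a region setup along these lines (the values are only
illustrative; see :ref:`configuration` for the full list of options) defines
the two regions used in the examples below::

    from beaker.cache import CacheManager
    from beaker.util import parse_cache_config_options

    cache_opts = {
        'cache.type': 'file',
        'cache.data_dir': '/tmp/cache/data',
        'cache.lock_dir': '/tmp/cache/lock',
        'cache.regions': 'short_term, long_term',
        'cache.short_term.type': 'memory',
        'cache.short_term.expire': '3600',
        'cache.long_term.type': 'file',
        'cache.long_term.expire': '86400',
    }

    cache = CacheManager(**parse_cache_config_options(cache_opts))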
210 | 211 | Assuming a ``long_term`` and ``short_term`` region were setup, the 212 | :meth:`~beaker.cache.CacheManager.region` decorator can be used:: 213 | 214 | @cache.region('short_term', 'my_search_func') 215 | def get_results(search_param): 216 | # do something to retrieve data 217 | data = get_data(search_param) 218 | return data 219 | 220 | results = get_results('gophers') 221 | 222 | Or using the :func:`~beaker.cache.cache_region` decorator:: 223 | 224 | @cache_region('short_term', 'my_search_func') 225 | def get_results(search_param): 226 | # do something to retrieve data 227 | data = get_data(search_param) 228 | return data 229 | 230 | results = get_results('gophers') 231 | 232 | The only difference with the :func:`~beaker.cache.cache_region` decorator is 233 | that the cache does not need to be configured when it is used. This allows one 234 | to decorate functions in a module before the Beaker cache is configured. 235 | 236 | Invalidating 237 | ------------ 238 | 239 | Since the :meth:`~beaker.cache.CacheManager.region` decorator hides the 240 | namespace used, manually removing the key requires the use of the 241 | :meth:`~beaker.cache.CacheManager.region_invalidate` function. To invalidate 242 | the 'gophers' result that the prior example referred to:: 243 | 244 | cache.region_invalidate(get_results, None, 'my_search_func', 'gophers') 245 | 246 | Or when using the :func:`~beaker.cache.cache_region` decorator, the 247 | :func:`beaker.cache.region_invalidate` function should be used:: 248 | 249 | region_invalidate(get_results, None, 'my_search_func', 'gophers') 250 | 251 | .. note:: 252 | Both the arguments used to specify the additional namespace info to the 253 | cache decorator **and** the arguments sent to the function need to be 254 | given to the :meth:`~beaker.cache.CacheManager.region_invalidate` 255 | function so that it can properly locate the namespace and cache key 256 | to remove. 257 | 258 | 259 | .. _Myghty: http://www.myghty.org/ 260 | -------------------------------------------------------------------------------- /beaker/docs/changes.rst: -------------------------------------------------------------------------------- 1 | :tocdepth: 2 2 | 3 | .. _changes: 4 | 5 | Changes in Beaker 6 | ***************** 7 | 8 | .. include:: ../../CHANGELOG 9 | -------------------------------------------------------------------------------- /beaker/docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Beaker documentation build configuration file, created by 4 | # sphinx-quickstart on Fri Sep 19 15:12:15 2008. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # The contents of this file are pickled, so don't put values in the namespace 9 | # that aren't pickleable (module imports are okay, they're removed automatically). 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import sys 15 | import os 16 | 17 | # If your extensions are in another directory, add it here. If the directory 18 | # is relative to the documentation root, use os.path.abspath to make it 19 | # absolute, like shown here. 20 | sys.path.insert(0, os.path.abspath('../..')) 21 | 22 | # General configuration 23 | # --------------------- 24 | 25 | # Add any Sphinx extension module names here, as strings. They can be extensions 26 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
27 | extensions = ['sphinx.ext.autodoc'] 28 | 29 | # Add any paths that contain templates here, relative to this directory. 30 | # templates_path = ['_templates'] 31 | 32 | # The suffix of source filenames. 33 | source_suffix = '.rst' 34 | 35 | # The master toctree document. 36 | master_doc = 'index' 37 | 38 | # General information about the project. 39 | project = u'Beaker' 40 | copyright = u'2008-2016, Ben Bangert, Mike Bayer' 41 | 42 | # The version info for the project you're documenting, acts as replacement for 43 | # |version| and |release|, also used in various other places throughout the 44 | # built documents. 45 | # 46 | # The short X.Y version. 47 | version = '1.9' 48 | # The full version, including alpha/beta/rc tags. 49 | release = '1.9.0' 50 | 51 | # The language for content autogenerated by Sphinx. Refer to documentation 52 | # for a list of supported languages. 53 | #language = None 54 | 55 | # There are two options for replacing |today|: either, you set today to some 56 | # non-false value, then it is used: 57 | #today = '' 58 | # Else, today_fmt is used as the format for a strftime call. 59 | #today_fmt = '%B %d, %Y' 60 | 61 | # List of documents that shouldn't be included in the build. 62 | #unused_docs = [] 63 | 64 | # List of directories, relative to source directory, that shouldn't be searched 65 | # for source files. 66 | exclude_trees = [] 67 | 68 | # The reST default role (used for this markup: `text`) to use for all documents. 69 | #default_role = None 70 | 71 | # If true, '()' will be appended to :func: etc. cross-reference text. 72 | #add_function_parentheses = True 73 | 74 | # If true, the current module name will be prepended to all description 75 | # unit titles (such as .. function::). 76 | #add_module_names = True 77 | 78 | # If true, sectionauthor and moduleauthor directives will be shown in the 79 | # output. They are ignored by default. 80 | show_authors = True 81 | 82 | # The name of the Pygments (syntax highlighting) style to use. 83 | pygments_style = 'pastie' 84 | 85 | 86 | # Options for HTML output 87 | # ----------------------- 88 | 89 | # The style sheet to use for HTML and HTML Help pages. A file of that name 90 | # must exist either in Sphinx' static/ path, or in one of the custom paths 91 | # given in html_static_path. 92 | # html_style = 'default.css' 93 | 94 | # The name for this set of Sphinx documents. If None, it defaults to 95 | # " v documentation". 96 | #html_title = None 97 | 98 | # A shorter title for the navigation bar. Default is the same as html_title. 99 | #html_short_title = None 100 | 101 | # The name of an image file (within the static path) to place at the top of 102 | # the sidebar. 103 | #html_logo = None 104 | 105 | # The name of an image file (within the static path) to use as favicon of the 106 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 107 | # pixels large. 108 | #html_favicon = None 109 | 110 | # Add any paths that contain custom static files (such as style sheets) here, 111 | # relative to this directory. They are copied after the builtin static files, 112 | # so a file named "default.css" will overwrite the builtin "default.css". 113 | html_static_path = ['_static'] 114 | 115 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 116 | # using the given strftime format. 117 | #html_last_updated_fmt = '%b %d, %Y' 118 | 119 | # If true, SmartyPants will be used to convert quotes and dashes to 120 | # typographically correct entities. 
121 | #html_use_smartypants = True 122 | 123 | # html_index = 'contents.html' 124 | 125 | # Custom sidebar templates, maps document names to template names. 126 | # html_sidebars = {'index': 'indexsidebar.html'} 127 | 128 | # Additional templates that should be rendered to pages, maps page names to 129 | # template names. 130 | # html_additional_pages = {'index': 'index.html'} 131 | 132 | html_theme_options = { 133 | } 134 | 135 | # If false, no module index is generated. 136 | #html_use_modindex = True 137 | 138 | # If false, no index is generated. 139 | #html_use_index = True 140 | 141 | # If true, the index is split into individual pages for each letter. 142 | #html_split_index = False 143 | 144 | # If true, the reST sources are included in the HTML build as _sources/. 145 | #html_copy_source = True 146 | 147 | # If true, an OpenSearch description file will be output, and all pages will 148 | # contain a tag referring to it. The value of this option must be the 149 | # base URL from which the finished HTML is served. 150 | html_use_opensearch = 'https://beaker.readthedocs.io/' 151 | 152 | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). 153 | #html_file_suffix = '' 154 | 155 | # Output file base name for HTML help builder. 156 | htmlhelp_basename = 'Beakerdoc' 157 | 158 | 159 | # Options for LaTeX output 160 | # ------------------------ 161 | 162 | # The paper size ('letter' or 'a4'). 163 | #latex_paper_size = 'letter' 164 | 165 | # The font size ('10pt', '11pt' or '12pt'). 166 | #latex_font_size = '10pt' 167 | 168 | # Grouping the document tree into LaTeX files. List of tuples 169 | # (source start file, target name, title, author, document class [howto/manual]). 170 | latex_documents = [ 171 | ('contents', 'Beaker.tex', u'Beaker Documentation', 172 | u'Ben Bangert, Mike Bayer', 'manual'), 173 | ] 174 | 175 | # The name of an image file (relative to this directory) to place at the top of 176 | # the title page. 177 | #latex_logo = None 178 | 179 | # For "manual" documents, if this is true, then toplevel headings are parts, 180 | # not chapters. 181 | #latex_use_parts = False 182 | 183 | # Additional stuff for the LaTeX preamble. 184 | latex_preamble = ''' 185 | \usepackage{palatino} 186 | \definecolor{TitleColor}{rgb}{0.7,0,0} 187 | \definecolor{InnerLinkColor}{rgb}{0.7,0,0} 188 | \definecolor{OuterLinkColor}{rgb}{0.8,0,0} 189 | \definecolor{VerbatimColor}{rgb}{0.985,0.985,0.985} 190 | \definecolor{VerbatimBorderColor}{rgb}{0.8,0.8,0.8} 191 | ''' 192 | 193 | # Documents to append as an appendix to all manuals. 194 | #latex_appendices = [] 195 | 196 | # If false, no module index is generated. 197 | latex_use_modindex = False 198 | 199 | # Added to handle docs in middleware.py 200 | autoclass_content = "both" 201 | -------------------------------------------------------------------------------- /beaker/docs/glossary.rst: -------------------------------------------------------------------------------- 1 | .. _glossary: 2 | 3 | Glossary 4 | ======== 5 | 6 | .. glossary:: 7 | 8 | Cache Regions 9 | Bundles of configuration options keyed to a user-defined variable 10 | for use with the :meth:`beaker.cache.CacheManager.region` 11 | decorator. 12 | 13 | Container 14 | A Beaker container is a storage object for a specific cache value 15 | and the key under the namespace it has been assigned. 16 | 17 | Dog-Pile Effect 18 | What occurs when a cached object expires, and multiple requests to 19 | fetch it are made at the same time. 
In systems that don't lock or 20 | use a scheme to prevent multiple instances from simultaneously 21 | creating the same thing, every request will cause the system to 22 | create a new value to be cached. 23 | 24 | Beaker alleviates this with file locking to ensure that only a single 25 | copy is re-created while other requests for the same object are 26 | instead given the old value until the new one is ready. 27 | 28 | NamespaceManager 29 | A Beaker namespace manager, is best thought of as a collection of 30 | containers with various keys. For example, a single template to be 31 | cached might vary slightly depending on search term, or user login, so 32 | the template would be keyed based on the variable that changes its 33 | output. 34 | 35 | The namespace would be the template name, while each container would 36 | correspond to one of the values and the key it responds to. 37 | -------------------------------------------------------------------------------- /beaker/docs/index.rst: -------------------------------------------------------------------------------- 1 | Beaker Documentation 2 | ==================== 3 | 4 | Beaker is a library for caching and sessions for use with web applications and 5 | stand-alone Python scripts and applications. It comes with WSGI middleware for 6 | easy drop-in use with WSGI based web applications, and caching decorators for 7 | ease of use with any Python based application. 8 | 9 | * **Lazy-Loading Sessions**: No performance hit for having sessions active in a request unless they're actually used 10 | * **Performance**: Utilizes a multiple-reader / single-writer locking system to prevent the Dog Pile effect when caching. 11 | * **Multiple Back-ends**: File-based, DBM files, memcached, memory, Redis, MongoDB, and database (via SQLAlchemy) back-ends available for sessions and caching 12 | * **Cookie-based Sessions**: SHA-1 signatures with optional AES encryption for client-side cookie-based session storage 13 | * **Flexible Caching**: Data can be cached per function to different back-ends, with different expirations, and different keys 14 | * **Extensible Back-ends**: Add more back-ends using setuptools entrypoints to support new back-ends. 15 | 16 | .. toctree:: 17 | :maxdepth: 2 18 | 19 | configuration 20 | sessions 21 | caching 22 | 23 | .. toctree:: 24 | :maxdepth: 1 25 | 26 | changes 27 | 28 | 29 | Indices and tables 30 | ================== 31 | 32 | * :ref:`genindex` 33 | * :ref:`modindex` 34 | * :ref:`search` 35 | * :ref:`glossary` 36 | 37 | Module Listing 38 | -------------- 39 | 40 | .. toctree:: 41 | :maxdepth: 2 42 | 43 | modules/cache 44 | modules/container 45 | modules/middleware 46 | modules/session 47 | modules/synchronization 48 | modules/util 49 | modules/database 50 | modules/memcached 51 | modules/mongodb 52 | modules/redis 53 | modules/google 54 | modules/sqla 55 | modules/pbkdf2 56 | -------------------------------------------------------------------------------- /beaker/docs/modules/cache.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.cache` -- Cache module 2 | ================================================ 3 | 4 | .. automodule:: beaker.cache 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autodata:: beaker.cache.cache_regions 10 | .. autofunction:: cache_region 11 | .. autofunction:: region_invalidate 12 | .. autoclass:: Cache 13 | :members: get, clear 14 | .. 
autoclass:: CacheManager 15 | :members: region, region_invalidate, cache, invalidate 16 | -------------------------------------------------------------------------------- /beaker/docs/modules/container.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.container` -- Container and Namespace classes 2 | ========================================================== 3 | 4 | .. automodule:: beaker.container 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: DBMNamespaceManager 10 | :show-inheritance: 11 | .. autoclass:: FileNamespaceManager 12 | :show-inheritance: 13 | .. autoclass:: MemoryNamespaceManager 14 | :show-inheritance: 15 | .. autoclass:: NamespaceManager 16 | :members: 17 | .. autoclass:: OpenResourceNamespaceManager 18 | :show-inheritance: 19 | .. autoclass:: Value 20 | :members: 21 | :undoc-members: 22 | 23 | Deprecated Classes 24 | ------------------ 25 | .. autoclass:: Container 26 | .. autoclass:: ContainerMeta 27 | :show-inheritance: 28 | .. autoclass:: DBMContainer 29 | :show-inheritance: 30 | .. autoclass:: FileContainer 31 | :show-inheritance: 32 | .. autoclass:: MemoryContainer 33 | :show-inheritance: 34 | -------------------------------------------------------------------------------- /beaker/docs/modules/database.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.ext.database` -- Database Container and NameSpace Manager classes 2 | ============================================================================== 3 | 4 | .. automodule:: beaker.ext.database 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: DatabaseContainer 10 | .. autoclass:: DatabaseNamespaceManager 11 | -------------------------------------------------------------------------------- /beaker/docs/modules/google.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.ext.google` -- Google Container and NameSpace Manager classes 2 | ========================================================================== 3 | 4 | .. automodule:: beaker.ext.google 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: GoogleContainer 10 | .. autoclass:: GoogleNamespaceManager 11 | -------------------------------------------------------------------------------- /beaker/docs/modules/memcached.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.ext.memcached` -- Memcached Container and NameSpace Manager classes 2 | ================================================================================ 3 | 4 | .. automodule:: beaker.ext.memcached 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: MemcachedContainer 10 | :show-inheritance: 11 | .. autoclass:: MemcachedNamespaceManager 12 | :show-inheritance: 13 | .. autoclass:: PyLibMCNamespaceManager 14 | :show-inheritance: 15 | -------------------------------------------------------------------------------- /beaker/docs/modules/middleware.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.middleware` -- Middleware classes 2 | ============================================== 3 | 4 | .. automodule:: beaker.middleware 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: CacheMiddleware 10 | .. 
autoclass:: SessionMiddleware 11 | -------------------------------------------------------------------------------- /beaker/docs/modules/mongodb.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.ext.mongodb` -- MongoDB NameSpace Manager and Synchronizer 2 | ============================================================================== 3 | 4 | .. automodule:: beaker.ext.mongodb 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: MongoNamespaceManager 10 | .. autoclass:: MongoSynchronizer 11 | -------------------------------------------------------------------------------- /beaker/docs/modules/pbkdf2.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.crypto.pbkdf2` -- PKCS#5 v2.0 Password-Based Key Derivation classes 2 | ================================================================================ 3 | 4 | .. automodule:: beaker.crypto.pbkdf2 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autofunction:: pbkdf2 10 | -------------------------------------------------------------------------------- /beaker/docs/modules/redis.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.ext.redisnm` -- Redis NameSpace Manager and Synchronizer 2 | ============================================================================== 3 | 4 | .. automodule:: beaker.ext.redisnm 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: RedisNamespaceManager 10 | .. autoclass:: RedisSynchronizer 11 | -------------------------------------------------------------------------------- /beaker/docs/modules/rediscluster.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.ext.redisclusternm` -- Redis cluster NameSpace Manager and Synchronizer 2 | ============================================================================== 3 | 4 | .. automodule:: beaker.ext.redisclusternm 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: RedisClusterNamespaceManager 10 | .. autoclass:: RedisClusterSynchronizer 11 | -------------------------------------------------------------------------------- /beaker/docs/modules/session.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.session` -- Session classes 2 | ======================================== 3 | 4 | .. automodule:: beaker.session 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: CookieSession 10 | :members: save, expire, delete, invalidate 11 | .. autoclass:: Session 12 | :members: save, revert, lock, unlock, delete, invalidate 13 | .. autoclass:: SessionObject 14 | :members: persist, get_by_id, accessed 15 | .. autoclass:: SignedCookie 16 | .. autodata:: InvalidSignature 17 | -------------------------------------------------------------------------------- /beaker/docs/modules/sqla.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.ext.sqla` -- SqlAlchemy Container and NameSpace Manager classes 2 | ============================================================================ 3 | 4 | .. automodule:: beaker.ext.sqla 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autofunction:: make_cache_table 10 | .. autoclass:: SqlaContainer 11 | .. 
autoclass:: SqlaNamespaceManager 12 | -------------------------------------------------------------------------------- /beaker/docs/modules/synchronization.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.synchronization` -- Synchronization classes 2 | ======================================================== 3 | 4 | .. automodule:: beaker.synchronization 5 | 6 | Module Contents 7 | --------------- 8 | 9 | .. autoclass:: ConditionSynchronizer 10 | .. autoclass:: FileSynchronizer 11 | .. autoclass:: NameLock 12 | .. autoclass:: null_synchronizer 13 | .. autoclass:: SynchronizerImpl 14 | :members: 15 | -------------------------------------------------------------------------------- /beaker/docs/modules/util.rst: -------------------------------------------------------------------------------- 1 | :mod:`beaker.util` -- Beaker Utilities 2 | ======================================================== 3 | 4 | .. automodule:: beaker.util 5 | 6 | Module Contents 7 | --------------- 8 | .. autofunction:: encoded_path 9 | .. autofunction:: func_namespace 10 | .. autoclass:: SyncDict 11 | .. autoclass:: ThreadLocal 12 | .. autofunction:: verify_directory 13 | .. autofunction:: parse_cache_config_options -------------------------------------------------------------------------------- /beaker/docs/sessions.rst: -------------------------------------------------------------------------------- 1 | .. _sessions: 2 | 3 | ======== 4 | Sessions 5 | ======== 6 | 7 | About 8 | ===== 9 | 10 | Sessions provide a place to persist data in web applications, Beaker's session 11 | system simplifies session implementation details by providing WSGI middleware 12 | that handles them. 13 | 14 | All cookies are signed with an HMAC signature to prevent tampering by the 15 | client. 16 | 17 | Lazy-Loading 18 | ------------ 19 | 20 | Only when a session object is actually accessed will the session be loaded 21 | from the file-system, preventing performance hits on pages that don't use 22 | the session. 23 | 24 | Using 25 | ===== 26 | 27 | The session object provided by Beaker's 28 | :class:`~beaker.middleware.SessionMiddleware` implements a dict-style interface 29 | with a few additional object methods. Once the SessionMiddleware is in place, 30 | a session object will be made available as ``beaker.session`` in the WSGI 31 | environ. 32 | 33 | When a session is created on the backend, a cookie is placed in the response to 34 | the client. 35 | 36 | Getting data out of the session:: 37 | 38 | myvar = session['somekey'] 39 | 40 | Testing for a value:: 41 | 42 | logged_in = 'user_id' in session 43 | 44 | Adding data to the session:: 45 | 46 | session['name'] = 'Fred Smith' 47 | 48 | Complete example using a basic WSGI app with sessions:: 49 | 50 | from beaker.middleware import SessionMiddleware 51 | 52 | def simple_app(environ, start_response): 53 | # Get the session object from the environ 54 | session = environ['beaker.session'] 55 | 56 | # Check to see if a value is in the session 57 | user = 'logged_in' in session 58 | 59 | # Set some other session variable 60 | session['user_id'] = 10 61 | 62 | start_response('200 OK', [('Content-type', 'text/plain')]) 63 | return ['User is logged in: %s' % user] 64 | 65 | # Configure the SessionMiddleware 66 | session_opts = { 67 | 'session.type': 'file', 68 | 'session.cookie_expires': True, 69 | } 70 | wsgi_app = SessionMiddleware(simple_app, session_opts) 71 | 72 | Now ``wsgi_app`` is a replacement of original application ``simple_app``. 
73 | You should specify it as a request handler in your WSGI configuration file. 74 | 75 | .. note:: 76 | This example does **not** actually save the session for the next request. 77 | The :meth:`~beaker.session.Session.save` call explained below is 78 | required, unless the session is set to auto-save. 79 | 80 | .. _cookie_attributes: 81 | 82 | Session Attributes / Keys 83 | ------------------------- 84 | 85 | Sessions have several special attributes that can be used as needed by an 86 | application. 87 | 88 | * id - Unique 40 char SHA-generated session ID (by default this is uuid4). 89 | * last_accessed - The last time the session was accessed before the current 90 | access, if save_accessed_time is true; the last time it was modified if false; 91 | it will be None if the session was just created 92 | 93 | There are also several special session keys populated: 94 | 95 | * _accessed_time - When the session was loaded if save_accessed_time is true; 96 | when it was last written if false 97 | * _creation_time - When the session was created 98 | 99 | 100 | Saving 101 | ====== 102 | 103 | Sessions can be saved using the :meth:`~beaker.session.Session.save` method 104 | on the session object:: 105 | 106 | session.save() 107 | 108 | .. warning:: 109 | 110 | Beaker relies on Python's pickle module to pickle data objects for storage 111 | in the session. Objects that cannot be pickled should **not** be stored in 112 | the session. It's suggested to switch to the **json** ``data_serializer`` to avoid 113 | possible security issues with pickle. 114 | 115 | This flags a session to be saved, and it will be stored on the chosen back-end 116 | at the end of the request. 117 | 118 | .. warning:: 119 | 120 | When using the ``memory`` backend, the session will only be valid for the process 121 | that created it and will be lost when the process is restarted. It is usually 122 | suggested to only use the ``memory`` backend for development and not for production. 123 | 124 | If it's necessary to immediately save the session to the back-end, the 125 | :meth:`~beaker.session.SessionObject.persist` method should be used:: 126 | 127 | session.persist() 128 | 129 | This is usually unnecessary, however; deferring the save means the session is 130 | not stored if something catastrophic happens later in the request. 131 | 132 | **Order Matters**: When using the Beaker middleware, you **must call save before 133 | the headers are sent to the client**. Since Beaker's middleware watches for when 134 | the ``start_response`` function is called to know that it should add its cookie 135 | header, the session must be saved before it is called. 136 | 137 | Keep in mind that Response objects in popular frameworks (WebOb, Werkzeug, 138 | etc.) 
call start_response immediately, so if you are using one of those 139 | objects to handle your Response, you must call .save() before the Response 140 | object is called:: 141 | 142 | # this would apply to WebOb and possibly others too 143 | from werkzeug.wrappers import Response 144 | 145 | # this will work 146 | def sessions_work(environ, start_response): 147 | environ['beaker.session']['count'] += 1 148 | resp = Response('hello') 149 | environ['beaker.session'].save() 150 | return resp(environ, start_response) 151 | 152 | # this will not work 153 | def sessions_broken(environ, start_response): 154 | environ['beaker.session']['count'] += 1 155 | resp = Response('hello') 156 | retval = resp(environ, start_response) 157 | environ['beaker.session'].save() 158 | return retval 159 | 160 | 161 | 162 | Auto-save 163 | --------- 164 | 165 | Saves can be done automatically by setting the ``auto`` configuration option 166 | for sessions. When set, calling the :meth:`~beaker.session.Session.save` method 167 | is no longer required, and the session will be saved automatically anytime it is 168 | accessed during a request. 169 | 170 | 171 | Deleting 172 | ======== 173 | 174 | Calling the :meth:`~beaker.session.Session.delete` method deletes the session 175 | from the back-end storage and sends an expiration on the cookie requesting the 176 | browser to clear it:: 177 | 178 | session.delete() 179 | 180 | This should be used at the end of a request when the session should be deleted 181 | and will not be needed for the rest of the request. 182 | 183 | If a session should be invalidated, and a new session created and used during 184 | the request, the :meth:`~beaker.session.Session.invalidate` method should be 185 | used:: 186 | 187 | session.invalidate() 188 | 189 | Removing Expired/Old Sessions 190 | ----------------------------- 191 | 192 | Beaker does **not** automatically delete expired or old sessions on any of its 193 | back-ends. This task is left up to the developer, based on how sessions are 194 | being used and on which back-end. 195 | 196 | The database backend records the last accessed time as a column in the database, 197 | so a script could be run to delete session rows in the database that haven't 198 | been used in a long time. 199 | 200 | When using file-based sessions, a script could be run to remove files that 201 | haven't been touched in a long time, for example (in the session's data dir): 202 | 203 | .. code-block:: bash 204 | 205 | find . -type f -mtime +3 -print -exec rm {} \; 206 | 207 | 208 | Cookie Domain and Path 209 | ====================== 210 | 211 | In addition to setting a default cookie domain with the 212 | :ref:`cookie domain setting `, the cookie's domain and 213 | path can be set dynamically for a session with the domain and path properties. 214 | 215 | These settings will persist as long as the cookie exists, or until changed. 216 | 217 | Example:: 218 | 219 | # Setting the session's cookie domain and path 220 | session.domain = '.domain.com' 221 | session.path = '/admin' 222 | 223 | Cookie Security 224 | ====================== 225 | 226 | Beaker defaults to setting the cookie attributes `httponly` and `secure` 227 | to False; you may want to set both to True in production. The `samesite` attribute 228 | defaults to `Lax`; you can choose `Strict` for stronger protection. The reasons for 229 | using these cookie attributes are explained in the OWASP guides `HttpOnly`_, 230 | `SecureFlag`_ and `SameSite`_. 
231 | 232 | Example:: 233 | 234 | # Best practice cookie flags for security 235 | session.httponly = True 236 | session.secure = True 237 | session.samesite = 'Lax' # or 'Strict' 238 | 239 | .. _SecureFlag: https://www.owasp.org/index.php/SecureFlag 240 | .. _HttpOnly: https://www.owasp.org/index.php/HttpOnly#Mitigating_the_Most_Common_XSS_attack_using_HttpOnly 241 | .. _SameSite: https://www.owasp.org/index.php/SameSite 242 | 243 | Cookie-Based 244 | ============ 245 | 246 | Sessions can be stored purely on the client side using cookie-based sessions. 247 | This option can be turned on by setting the session type to ``cookie``. 248 | 249 | Using cookie-based sessions carries the limitation of how large a cookie can 250 | be (generally 4096 bytes). An exception will be thrown should a session get 251 | too large to fit in a cookie, so cookie-based sessions should be used 252 | carefully and only small bits of data should be stored in them (the user's login 253 | name, admin status, etc.). 254 | 255 | Large cookies can slow down page loads as they increase latency for every 256 | page request, since the cookie is sent with every request under that domain. 257 | Static content such as images and JavaScript should be served from a domain 258 | that the cookie is not valid for, to prevent this. 259 | 260 | Cookie-based sessions scale easily in a clustered environment as there's no 261 | need for a shared storage system when different servers handle the same 262 | session. 263 | 264 | .. _encryption: 265 | 266 | Encryption 267 | ---------- 268 | 269 | In the event that cookie-based sessions should also be encrypted to 270 | prevent the user from being able to decode the data (in addition to not 271 | being able to tamper with it), Beaker can use 256-bit AES encryption to 272 | secure the contents of the cookie. 273 | 274 | Depending on the Python implementation used, Beaker may require an additional 275 | library to provide AES encryption. 276 | 277 | On CPython (the regular Python), one of the following libraries is required: 278 | 279 | * The `python-nss`_ library 280 | * The `pycryptopp`_ library 281 | * The `cryptography`_ library 282 | * The `PyCrypto`_ library 283 | 284 | On Jython, no additional packages are required, but at least on the Sun JRE, 285 | the size of the encryption key is by default limited to 128 bits, which causes 286 | generated sessions to be incompatible with those generated in CPython, and vice 287 | versa. To overcome this limitation, you need to install the unlimited strength 288 | jurisdiction policy files from Sun: 289 | 290 | * `Policy files for Java 5 `_ 291 | * `Policy files for Java 6 `_ 292 | 293 | .. _cryptography: https://pypi.python.org/pypi/cryptography/ 294 | .. _python-nss: https://pypi.python.org/pypi/python-nss/ 295 | .. _pycryptopp: https://pypi.python.org/pypi/pycryptopp/ 296 | .. 
_PyCrypto: https://pypi.python.org/pypi/pycrypto/ 297 | -------------------------------------------------------------------------------- /beaker/exceptions.py: -------------------------------------------------------------------------------- 1 | """Beaker exception classes""" 2 | 3 | 4 | class BeakerException(Exception): 5 | pass 6 | 7 | 8 | class BeakerWarning(RuntimeWarning): 9 | """Issued at runtime.""" 10 | 11 | 12 | class CreationAbortedError(Exception): 13 | """Deprecated.""" 14 | 15 | 16 | class InvalidCacheBackendError(BeakerException, ImportError): 17 | pass 18 | 19 | 20 | class MissingCacheParameter(BeakerException): 21 | pass 22 | 23 | 24 | class LockError(BeakerException): 25 | pass 26 | 27 | 28 | class InvalidCryptoBackendError(BeakerException): 29 | pass 30 | -------------------------------------------------------------------------------- /beaker/ext/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bbangert/beaker/913d195875899b31fdbf8b4fda64094870e2d3d6/beaker/ext/__init__.py -------------------------------------------------------------------------------- /beaker/ext/database.py: -------------------------------------------------------------------------------- 1 | from beaker._compat import pickle 2 | 3 | import logging 4 | import pickle 5 | from datetime import datetime 6 | 7 | from beaker.container import OpenResourceNamespaceManager, Container 8 | from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter 9 | from beaker.synchronization import file_synchronizer, null_synchronizer 10 | from beaker.util import verify_directory, SyncDict 11 | from beaker.ext.sqla import SqlaNamespaceManager 12 | 13 | log = logging.getLogger(__name__) 14 | 15 | sa = None 16 | types = None 17 | 18 | 19 | class DatabaseNamespaceManager(SqlaNamespaceManager): 20 | 21 | @classmethod 22 | def _init_dependencies(cls): 23 | SqlaNamespaceManager._init_dependencies() 24 | 25 | global sa, types 26 | if sa is not None: 27 | return 28 | # SqlaNamespaceManager will already error 29 | import sqlalchemy as sa 30 | from sqlalchemy import types 31 | 32 | def __init__(self, namespace, url=None, sa_opts=None, table_name='beaker_cache', 33 | data_dir=None, lock_dir=None, schema_name=None, **params): 34 | """Creates a database namespace manager 35 | 36 | ``url`` 37 | SQLAlchemy compliant db url 38 | ``sa_opts`` 39 | A dictionary of SQLAlchemy keyword options to initialize the engine 40 | with. 41 | ``table_name`` 42 | The table name to use in the database for the cache. 43 | ``schema_name`` 44 | The schema name to use in the database for the cache. 
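        A rough usage sketch (the SQLite URL and key names below are only
        examples, and this backend is typically reached through the
        ``beaker.cache.Cache`` front-end rather than instantiated directly)::

            from beaker.cache import Cache

            cache = Cache('my_namespace', type='ext:database',
                          url='sqlite:///beaker_cache.db')
            cache.set_value('answer', 42)
            assert cache.get_value('answer') == 42
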
45 | """ 46 | OpenResourceNamespaceManager.__init__(self, namespace) 47 | 48 | if sa_opts is None: 49 | sa_opts = {} 50 | 51 | self.lock_dir = None 52 | 53 | if lock_dir: 54 | self.lock_dir = lock_dir 55 | elif data_dir: 56 | self.lock_dir = data_dir + "/container_db_lock" 57 | if self.lock_dir: 58 | verify_directory(self.lock_dir) 59 | 60 | # Check to see if the table's been created before 61 | sa_opts['sa.url'] = url = url or sa_opts['sa.url'] 62 | table_key = url + table_name 63 | 64 | def make_table(engine): 65 | meta = sa.MetaData() 66 | meta.bind = engine 67 | cache_table = sa.Table(table_name, meta, 68 | sa.Column('id', types.Integer, primary_key=True), 69 | sa.Column('namespace', types.String(255), nullable=False), 70 | sa.Column('accessed', types.DateTime, nullable=False), 71 | sa.Column('created', types.DateTime, nullable=False), 72 | sa.Column('data', types.PickleType, nullable=False), 73 | sa.UniqueConstraint('namespace'), 74 | schema=schema_name if schema_name else meta.schema) 75 | cache_table.create(bind=engine, checkfirst=True) 76 | return cache_table 77 | 78 | engine = self.__class__.binds.get(url, lambda: sa.engine_from_config(sa_opts, 'sa.')) 79 | table = self.__class__.tables.get(table_key, lambda: make_table(engine)) 80 | 81 | SqlaNamespaceManager.__init__(self, namespace, engine, table, 82 | data_dir=data_dir, lock_dir=lock_dir) 83 | 84 | 85 | class DatabaseContainer(Container): 86 | namespace_manager = DatabaseNamespaceManager 87 | -------------------------------------------------------------------------------- /beaker/ext/google.py: -------------------------------------------------------------------------------- 1 | from beaker._compat import pickle 2 | 3 | import logging 4 | from datetime import datetime 5 | 6 | from beaker.container import OpenResourceNamespaceManager, Container 7 | from beaker.exceptions import InvalidCacheBackendError 8 | from beaker.synchronization import null_synchronizer 9 | 10 | log = logging.getLogger(__name__) 11 | 12 | db = None 13 | 14 | 15 | class GoogleNamespaceManager(OpenResourceNamespaceManager): 16 | tables = {} 17 | 18 | @classmethod 19 | def _init_dependencies(cls): 20 | global db 21 | if db is not None: 22 | return 23 | try: 24 | db = __import__('google.appengine.ext.db').appengine.ext.db 25 | except ImportError: 26 | raise InvalidCacheBackendError("Datastore cache backend requires the " 27 | "'google.appengine.ext' library") 28 | 29 | def __init__(self, namespace, table_name='beaker_cache', **params): 30 | """Creates a datastore namespace manager""" 31 | OpenResourceNamespaceManager.__init__(self, namespace) 32 | 33 | def make_cache(): 34 | table_dict = dict(created=db.DateTimeProperty(), 35 | accessed=db.DateTimeProperty(), 36 | data=db.BlobProperty()) 37 | table = type(table_name, (db.Model,), table_dict) 38 | return table 39 | self.table_name = table_name 40 | self.cache = GoogleNamespaceManager.tables.setdefault(table_name, make_cache()) 41 | self.hash = {} 42 | self._is_new = False 43 | self.loaded = False 44 | self.log_debug = logging.DEBUG >= log.getEffectiveLevel() 45 | 46 | # Google wants namespaces to start with letters, change the namespace 47 | # to start with a letter 48 | self.namespace = 'p%s' % self.namespace 49 | 50 | def get_access_lock(self): 51 | return null_synchronizer() 52 | 53 | def get_creation_lock(self, key): 54 | # this is weird, should probably be present 55 | return null_synchronizer() 56 | 57 | def do_open(self, flags, replace): 58 | # If we already loaded the data, don't bother loading it again 
59 | if self.loaded: 60 | self.flags = flags 61 | return 62 | 63 | item = self.cache.get_by_key_name(self.namespace) 64 | 65 | if not item: 66 | self._is_new = True 67 | self.hash = {} 68 | else: 69 | self._is_new = False 70 | try: 71 | self.hash = pickle.loads(str(item.data)) 72 | except (IOError, OSError, EOFError, pickle.PickleError): 73 | if self.log_debug: 74 | log.debug("Couln't load pickle data, creating new storage") 75 | self.hash = {} 76 | self._is_new = True 77 | self.flags = flags 78 | self.loaded = True 79 | 80 | def do_close(self): 81 | if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): 82 | if self._is_new: 83 | item = self.cache(key_name=self.namespace) 84 | item.data = pickle.dumps(self.hash) 85 | item.created = datetime.now() 86 | item.accessed = datetime.now() 87 | item.put() 88 | self._is_new = False 89 | else: 90 | item = self.cache.get_by_key_name(self.namespace) 91 | item.data = pickle.dumps(self.hash) 92 | item.accessed = datetime.now() 93 | item.put() 94 | self.flags = None 95 | 96 | def do_remove(self): 97 | item = self.cache.get_by_key_name(self.namespace) 98 | item.delete() 99 | self.hash = {} 100 | 101 | # We can retain the fact that we did a load attempt, but since the 102 | # file is gone this will be a new namespace should it be saved. 103 | self._is_new = True 104 | 105 | def __getitem__(self, key): 106 | return self.hash[key] 107 | 108 | def __contains__(self, key): 109 | return key in self.hash 110 | 111 | def __setitem__(self, key, value): 112 | self.hash[key] = value 113 | 114 | def __delitem__(self, key): 115 | del self.hash[key] 116 | 117 | def keys(self): 118 | return self.hash.keys() 119 | 120 | 121 | class GoogleContainer(Container): 122 | namespace_class = GoogleNamespaceManager 123 | -------------------------------------------------------------------------------- /beaker/ext/memcached.py: -------------------------------------------------------------------------------- 1 | from .._compat import PY2 2 | 3 | from beaker.container import NamespaceManager, Container 4 | from beaker.crypto.util import sha1 5 | from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter 6 | from beaker.synchronization import file_synchronizer 7 | from beaker.util import verify_directory, SyncDict 8 | import warnings 9 | 10 | MAX_KEY_LENGTH = 250 11 | 12 | _client_libs = {} 13 | 14 | 15 | def _load_client(name='auto'): 16 | if name in _client_libs: 17 | return _client_libs[name] 18 | 19 | def _cmemcache(): 20 | global cmemcache 21 | import cmemcache 22 | warnings.warn("cmemcache is known to have serious " 23 | "concurrency issues; consider using 'memcache' ") 24 | return cmemcache 25 | 26 | def _memcache(): 27 | global memcache 28 | import memcache 29 | return memcache 30 | 31 | def _bmemcached(): 32 | global bmemcached 33 | import bmemcached 34 | return bmemcached 35 | 36 | def _auto(): 37 | for _client in (_cmemcache, _memcache, _bmemcached): 38 | try: 39 | return _client() 40 | except ImportError: 41 | pass 42 | else: 43 | raise InvalidCacheBackendError( 44 | "Memcached cache backend requires one memcache to be installed." 
45 | ) 46 | 47 | clients = { 48 | 'cmemcache': _cmemcache, 49 | 'memcache': _memcache, 50 | 'bmemcached': _bmemcached, 51 | 'auto': _auto 52 | } 53 | _client_libs[name] = clib = clients[name]() 54 | return clib 55 | 56 | 57 | class MemcachedNamespaceManager(NamespaceManager): 58 | """Provides the :class:`.NamespaceManager` API over a memcache client library.""" 59 | 60 | clients = SyncDict() 61 | 62 | def __init__(self, namespace, url, 63 | memcache_module='auto', 64 | data_dir=None, lock_dir=None, 65 | **kw): 66 | NamespaceManager.__init__(self, namespace) 67 | 68 | _memcache_module = _load_client(memcache_module) 69 | 70 | if not url: 71 | raise MissingCacheParameter("url is required") 72 | 73 | self.lock_dir = None 74 | 75 | if lock_dir: 76 | self.lock_dir = lock_dir 77 | elif data_dir: 78 | self.lock_dir = data_dir + "/container_mcd_lock" 79 | if self.lock_dir: 80 | verify_directory(self.lock_dir) 81 | 82 | self.mc = MemcachedNamespaceManager.clients.get( 83 | (memcache_module, url), 84 | _memcache_module.Client, 85 | url.split(';')) 86 | 87 | def get_creation_lock(self, key): 88 | return file_synchronizer( 89 | identifier="memcachedcontainer/funclock/%s/%s" % 90 | (self.namespace, key), lock_dir=self.lock_dir) 91 | 92 | def _format_key(self, key): 93 | if not isinstance(key, str): 94 | key = key.decode('ascii') 95 | formated_key = (self.namespace + '_' + key).replace(' ', '\302\267') 96 | if len(formated_key) > MAX_KEY_LENGTH: 97 | if not PY2: 98 | formated_key = formated_key.encode('utf-8') 99 | formated_key = sha1(formated_key).hexdigest() 100 | return formated_key 101 | 102 | def __getitem__(self, key): 103 | return self.mc.get(self._format_key(key)) 104 | 105 | def __contains__(self, key): 106 | value = self.mc.get(self._format_key(key)) 107 | return value is not None 108 | 109 | def has_key(self, key): 110 | return key in self 111 | 112 | def set_value(self, key, value, expiretime=None): 113 | if expiretime: 114 | self.mc.set(self._format_key(key), value, time=expiretime) 115 | else: 116 | self.mc.set(self._format_key(key), value) 117 | 118 | def __setitem__(self, key, value): 119 | self.set_value(key, value) 120 | 121 | def __delitem__(self, key): 122 | self.mc.delete(self._format_key(key)) 123 | 124 | def do_remove(self): 125 | self.mc.flush_all() 126 | 127 | def keys(self): 128 | raise NotImplementedError( 129 | "Memcache caching does not " 130 | "support iteration of all cache keys") 131 | 132 | 133 | class MemcachedContainer(Container): 134 | """Container class which invokes :class:`.MemcacheNamespaceManager`.""" 135 | namespace_class = MemcachedNamespaceManager 136 | -------------------------------------------------------------------------------- /beaker/ext/mongodb.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import threading 4 | import time 5 | import pickle 6 | 7 | try: 8 | import pymongo 9 | import pymongo.errors 10 | import bson 11 | except ImportError: 12 | pymongo = None 13 | bson = None 14 | 15 | from beaker.container import NamespaceManager 16 | from beaker.synchronization import SynchronizerImpl 17 | from beaker.util import SyncDict, machine_identifier 18 | from beaker.crypto.util import sha1 19 | from beaker._compat import string_type, PY2 20 | 21 | 22 | class MongoNamespaceManager(NamespaceManager): 23 | """Provides the :class:`.NamespaceManager` API over MongoDB. 24 | 25 | Provided ``url`` can be both a mongodb connection string or 26 | an already existing MongoClient instance. 
27 | 28 | The data will be stored into ``beaker_cache`` collection of the 29 | *default database*, so make sure your connection string or 30 | MongoClient point to a default database. 31 | """ 32 | MAX_KEY_LENGTH = 1024 33 | 34 | clients = SyncDict() 35 | 36 | def __init__(self, namespace, url, **kw): 37 | super(MongoNamespaceManager, self).__init__(namespace) 38 | self.lock_dir = None # MongoDB uses mongo itself for locking. 39 | 40 | if pymongo is None: 41 | raise RuntimeError('pymongo3 is not available') 42 | 43 | if isinstance(url, string_type): 44 | self.client = MongoNamespaceManager.clients.get(url, pymongo.MongoClient, url) 45 | else: 46 | self.client = url 47 | self.db = self.client.get_default_database() 48 | 49 | def _format_key(self, key): 50 | if not isinstance(key, str): 51 | key = key.decode('ascii') 52 | if len(key) > (self.MAX_KEY_LENGTH - len(self.namespace) - 1): 53 | if not PY2: 54 | key = key.encode('utf-8') 55 | key = sha1(key).hexdigest() 56 | return '%s:%s' % (self.namespace, key) 57 | 58 | def get_creation_lock(self, key): 59 | return MongoSynchronizer(self._format_key(key), self.client) 60 | 61 | def __getitem__(self, key): 62 | self._clear_expired() 63 | entry = self.db.backer_cache.find_one({'_id': self._format_key(key)}) 64 | if entry is None: 65 | raise KeyError(key) 66 | return pickle.loads(entry['value']) 67 | 68 | def __contains__(self, key): 69 | self._clear_expired() 70 | entry = self.db.backer_cache.find_one({'_id': self._format_key(key)}) 71 | return entry is not None 72 | 73 | def has_key(self, key): 74 | return key in self 75 | 76 | def set_value(self, key, value, expiretime=None): 77 | self._clear_expired() 78 | 79 | expiration = None 80 | if expiretime is not None: 81 | expiration = time.time() + expiretime 82 | 83 | value = pickle.dumps(value) 84 | self.db.backer_cache.update_one({'_id': self._format_key(key)}, 85 | {'$set': {'value': bson.Binary(value), 86 | 'expiration': expiration}}, 87 | upsert=True) 88 | 89 | def __setitem__(self, key, value): 90 | self.set_value(key, value) 91 | 92 | def __delitem__(self, key): 93 | self._clear_expired() 94 | self.db.backer_cache.delete_many({'_id': self._format_key(key)}) 95 | 96 | def do_remove(self): 97 | self.db.backer_cache.delete_many({'_id': {'$regex': '^%s' % self.namespace}}) 98 | 99 | def keys(self): 100 | return [e['key'].split(':', 1)[-1] for e in self.db.backer_cache.find_all( 101 | {'_id': {'$regex': '^%s' % self.namespace}} 102 | )] 103 | 104 | def _clear_expired(self): 105 | now = time.time() 106 | self.db.backer_cache.delete_many({'_id': {'$regex': '^%s' % self.namespace}, 107 | 'expiration': {'$ne': None, '$lte': now}}) 108 | 109 | 110 | class MongoSynchronizer(SynchronizerImpl): 111 | """Provides a Writer/Reader lock based on MongoDB. 112 | 113 | Provided ``url`` can be both a mongodb connection string or 114 | an already existing MongoClient instance. 115 | 116 | The data will be stored into ``beaker_locks`` collection of the 117 | *default database*, so make sure your connection string or 118 | MongoClient point to a default database. 119 | 120 | Locks are identified by local machine, PID and threadid, so 121 | are suitable for use in both local and distributed environments. 122 | """ 123 | # If a cache entry generation function can take a lot, 124 | # but 15 minutes is more than a reasonable time. 
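    # (A stale lock, e.g. one left behind by a crashed process, is removed by _clear_expired_locks() once its timestamp is older than LOCK_EXPIRATION seconds, so value generation is assumed to finish well within 15 minutes.)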
125 | LOCK_EXPIRATION = 900 126 | MACHINE_ID = machine_identifier() 127 | 128 | def __init__(self, identifier, url): 129 | super(MongoSynchronizer, self).__init__() 130 | self.identifier = identifier 131 | if isinstance(url, string_type): 132 | self.client = MongoNamespaceManager.clients.get(url, pymongo.MongoClient, url) 133 | else: 134 | self.client = url 135 | self.db = self.client.get_default_database() 136 | 137 | def _clear_expired_locks(self): 138 | now = datetime.datetime.utcnow() 139 | expired = now - datetime.timedelta(seconds=self.LOCK_EXPIRATION) 140 | self.db.beaker_locks.delete_many({'_id': self.identifier, 'timestamp': {'$lte': expired}}) 141 | return now 142 | 143 | def _get_owner_id(self): 144 | return '%s-%s-%s' % (self.MACHINE_ID, os.getpid(), threading.current_thread().ident) 145 | 146 | def do_release_read_lock(self): 147 | owner_id = self._get_owner_id() 148 | self.db.beaker_locks.update_one({'_id': self.identifier, 'readers': owner_id}, 149 | {'$pull': {'readers': owner_id}}) 150 | 151 | def do_acquire_read_lock(self, wait): 152 | now = self._clear_expired_locks() 153 | owner_id = self._get_owner_id() 154 | while True: 155 | try: 156 | self.db.beaker_locks.update_one({'_id': self.identifier, 'owner': None}, 157 | {'$set': {'timestamp': now}, 158 | '$push': {'readers': owner_id}}, 159 | upsert=True) 160 | return True 161 | except pymongo.errors.DuplicateKeyError: 162 | if not wait: 163 | return False 164 | time.sleep(0.2) 165 | 166 | def do_release_write_lock(self): 167 | self.db.beaker_locks.delete_one({'_id': self.identifier, 'owner': self._get_owner_id()}) 168 | 169 | def do_acquire_write_lock(self, wait): 170 | now = self._clear_expired_locks() 171 | owner_id = self._get_owner_id() 172 | while True: 173 | try: 174 | self.db.beaker_locks.update_one({'_id': self.identifier, 'owner': None, 175 | 'readers': []}, 176 | {'$set': {'owner': owner_id, 177 | 'timestamp': now}}, 178 | upsert=True) 179 | return True 180 | except pymongo.errors.DuplicateKeyError: 181 | if not wait: 182 | return False 183 | time.sleep(0.2) 184 | 185 | -------------------------------------------------------------------------------- /beaker/ext/redisclusternm.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | import time 4 | import pickle 5 | 6 | from beaker.container import NamespaceManager 7 | 8 | try: 9 | import redis 10 | except ImportError: 11 | redis = None 12 | 13 | from beaker.ext.redisnm import RedisNamespaceManager, RedisSynchronizer 14 | from beaker._compat import string_type 15 | 16 | 17 | class RedisClusterNamespaceManager(RedisNamespaceManager): 18 | """Provides the :class:`.NamespaceManager` API over Redis cluster. 19 | 20 | Provided ``urls`` can be both multiple redis connection strings separated by a comma or 21 | an already existing RedisCluster instance. 22 | 23 | Unlike a StrictRedis connection string, a RedisCluster one does not support 24 | database indicators, it is zero by default. 25 | 26 | Example: `redis://node-1:7001,redis://node-2:7002` 27 | 28 | Additional options can be passed in kwargs (e.g. `username="redis", password="secure_password"`). 29 | 30 | The data will be stored into redis keys, with their name 31 | starting with ``beaker_cache:``. 32 | """ 33 | 34 | def __init__(self, namespace, urls, timeout=None, **kwargs): 35 | super(RedisNamespaceManager, self).__init__(namespace) 36 | self.lock_dir = None # Redis uses redis itself for locking. 
37 | self.timeout = timeout 38 | self.nodes = [] 39 | self.options = kwargs 40 | 41 | if redis is None: 42 | raise RuntimeError('redis is not available') 43 | 44 | if isinstance(urls, string_type): 45 | for url in urls.split(','): 46 | url_options = redis.connection.parse_url(url) 47 | if 'db' in url_options: 48 | raise redis.exceptions.RedisClusterException( 49 | "A ``db`` querystring option can only be 0 in cluster mode" 50 | ) 51 | self.nodes.append(redis.cluster.ClusterNode( 52 | host=url_options.get('host'), 53 | port=url_options.get('port') 54 | )) 55 | self.client = RedisClusterNamespaceManager.clients.get( 56 | urls, redis.cluster.RedisCluster, startup_nodes=self.nodes, **kwargs 57 | ) 58 | else: 59 | self.client = urls 60 | 61 | def get_creation_lock(self, key): 62 | return RedisClusterSynchronizer(self._format_key(key), self.client, self.nodes, **self.options) 63 | 64 | 65 | class RedisClusterSynchronizer(RedisSynchronizer): 66 | """Synchronizer based on redis cluster. 67 | 68 | Provided ``urls`` can be both multiple redis connection strings separated by a comma or 69 | an already existing RedisCluster instance. 70 | 71 | Unlike a StrictRedis connection string, a RedisCluster one does not support 72 | database indicators, it is zero by default. 73 | 74 | Example: ``redis://node-1:7001,redis://node-2:7002, 75 | 76 | This Synchronizer only supports 1 reader or 1 writer at time, not concurrent readers. 77 | """ 78 | RELEASE_LOCK_LUA = """ 79 | if redis.call('get', KEYS[1]) == ARGV[1] then 80 | return redis.call('del', KEYS[1]) 81 | else 82 | return 0 83 | end 84 | """ 85 | 86 | def __init__(self, identifier, urls, nodes=None, **kwargs): 87 | super(RedisSynchronizer, self).__init__() 88 | self.identifier = 'beaker_lock:%s' % identifier 89 | if isinstance(urls, string_type): 90 | self.client = RedisClusterNamespaceManager.clients.get( 91 | urls, redis.cluster.RedisCluster, startup_nodes=nodes, **kwargs 92 | ) 93 | else: 94 | self.client = urls 95 | self._release_lock = self.client.register_script(self.RELEASE_LOCK_LUA) 96 | 97 | def do_release_write_lock(self): 98 | identifier = self.identifier 99 | owner_id = self._get_owner_id() 100 | self._release_lock(keys=[identifier], args=[owner_id]) -------------------------------------------------------------------------------- /beaker/ext/redisnm.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | import time 4 | import pickle 5 | 6 | try: 7 | import redis 8 | except ImportError: 9 | redis = None 10 | 11 | from beaker.container import NamespaceManager 12 | from beaker.synchronization import SynchronizerImpl 13 | from beaker.util import SyncDict, machine_identifier 14 | from beaker.crypto.util import sha1 15 | from beaker._compat import string_type, PY2 16 | 17 | 18 | class RedisNamespaceManager(NamespaceManager): 19 | """Provides the :class:`.NamespaceManager` API over Redis. 20 | 21 | Provided ``url`` can be both a redis connection string or 22 | an already existing StrictRedis instance. 23 | 24 | The data will be stored into redis keys, with their name 25 | starting with ``beaker_cache:``. So make sure you provide 26 | a specific database number if you don't want to mix them 27 | with your own data. 28 | """ 29 | MAX_KEY_LENGTH = 1024 30 | 31 | clients = SyncDict() 32 | 33 | def __init__(self, namespace, url, timeout=None, **kw): 34 | super(RedisNamespaceManager, self).__init__(namespace) 35 | self.lock_dir = None # Redis uses redis itself for locking. 
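        # ``timeout`` (in seconds), when given, is used by set_value() below as the default expiration whenever no explicit ``expiretime`` is passed.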
36 | self.timeout = timeout 37 | 38 | if redis is None: 39 | raise RuntimeError('redis is not available') 40 | 41 | if isinstance(url, string_type): 42 | self.client = RedisNamespaceManager.clients.get(url, redis.StrictRedis.from_url, url) 43 | else: 44 | self.client = url 45 | 46 | def _format_key(self, key): 47 | if not isinstance(key, str): 48 | key = key.decode('ascii') 49 | if len(key) > (self.MAX_KEY_LENGTH - len(self.namespace) - len('beaker_cache:') - 1): 50 | if not PY2: 51 | key = key.encode('utf-8') 52 | key = sha1(key).hexdigest() 53 | return 'beaker_cache:%s:%s' % (self.namespace, key) 54 | 55 | def get_creation_lock(self, key): 56 | return RedisSynchronizer(self._format_key(key), self.client) 57 | 58 | def __getitem__(self, key): 59 | entry = self.client.get(self._format_key(key)) 60 | if entry is None: 61 | raise KeyError(key) 62 | return pickle.loads(entry) 63 | 64 | def __contains__(self, key): 65 | return self.client.exists(self._format_key(key)) 66 | 67 | def has_key(self, key): 68 | return key in self 69 | 70 | def set_value(self, key, value, expiretime=None): 71 | value = pickle.dumps(value) 72 | if expiretime is None and self.timeout is not None: 73 | expiretime = self.timeout 74 | if expiretime is not None: 75 | self.client.setex(self._format_key(key), int(expiretime), value) 76 | else: 77 | self.client.set(self._format_key(key), value) 78 | 79 | def __setitem__(self, key, value): 80 | self.set_value(key, value) 81 | 82 | def __delitem__(self, key): 83 | self.client.delete(self._format_key(key)) 84 | 85 | def do_remove(self): 86 | for k in self.keys(): 87 | self.client.delete(k) 88 | 89 | def keys(self): 90 | return self.client.keys('beaker_cache:%s:*' % self.namespace) 91 | 92 | 93 | class RedisSynchronizer(SynchronizerImpl): 94 | """Synchronizer based on redis. 95 | 96 | Provided ``url`` can be both a redis connection string or 97 | an already existing StrictRedis instance. 98 | 99 | This Synchronizer only supports 1 reader or 1 writer at time, not concurrent readers. 100 | """ 101 | # If a cache entry generation function can take a lot, 102 | # but 15 minutes is more than a reasonable time. 
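    # (The lock key is written with ``ex=LOCK_EXPIRATION`` and ``nx=True`` in do_acquire_write_lock, so a lock held by a crashed process simply expires on its own after 15 minutes.)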
103 | LOCK_EXPIRATION = 900 104 | MACHINE_ID = machine_identifier() 105 | 106 | def __init__(self, identifier, url): 107 | super(RedisSynchronizer, self).__init__() 108 | self.identifier = 'beaker_lock:%s' % identifier 109 | if isinstance(url, string_type): 110 | self.client = RedisNamespaceManager.clients.get(url, redis.StrictRedis.from_url, url) 111 | else: 112 | self.client = url 113 | 114 | def _get_owner_id(self): 115 | return ( 116 | '%s-%s-%s' % (self.MACHINE_ID, os.getpid(), threading.current_thread().ident) 117 | ).encode('ascii') 118 | 119 | def do_release_read_lock(self): 120 | self.do_release_write_lock() 121 | 122 | def do_acquire_read_lock(self, wait): 123 | self.do_acquire_write_lock(wait) 124 | 125 | def do_release_write_lock(self): 126 | identifier = self.identifier 127 | owner_id = self._get_owner_id() 128 | def execute_release(pipe): 129 | lock_value = pipe.get(identifier) 130 | if lock_value == owner_id: 131 | pipe.delete(identifier) 132 | self.client.transaction(execute_release, identifier) 133 | 134 | def do_acquire_write_lock(self, wait): 135 | owner_id = self._get_owner_id() 136 | while True: 137 | if self.client.set(self.identifier, owner_id, ex=self.LOCK_EXPIRATION, nx=True): 138 | return True 139 | 140 | if not wait: 141 | return False 142 | time.sleep(0.2) 143 | 144 | -------------------------------------------------------------------------------- /beaker/ext/sqla.py: -------------------------------------------------------------------------------- 1 | from beaker._compat import pickle 2 | 3 | import logging 4 | import pickle 5 | from datetime import datetime 6 | 7 | from beaker.container import OpenResourceNamespaceManager, Container 8 | from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter 9 | from beaker.synchronization import file_synchronizer, null_synchronizer 10 | from beaker.util import verify_directory, SyncDict 11 | 12 | 13 | log = logging.getLogger(__name__) 14 | 15 | sa = None 16 | 17 | 18 | class SqlaNamespaceManager(OpenResourceNamespaceManager): 19 | binds = SyncDict() 20 | tables = SyncDict() 21 | 22 | @classmethod 23 | def _init_dependencies(cls): 24 | global sa 25 | if sa is not None: 26 | return 27 | try: 28 | import sqlalchemy as sa 29 | except ImportError: 30 | raise InvalidCacheBackendError("SQLAlchemy, which is required by " 31 | "this backend, is not installed") 32 | 33 | def __init__(self, namespace, bind, table, data_dir=None, lock_dir=None, 34 | **kwargs): 35 | """Create a namespace manager for use with a database table via 36 | SQLAlchemy. 37 | 38 | ``bind`` 39 | SQLAlchemy ``Engine`` or ``Connection`` object 40 | 41 | ``table`` 42 | SQLAlchemy ``Table`` object in which to store namespace data. 43 | This should usually be something created by ``make_cache_table``. 
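        A minimal wiring sketch (the engine URL and ``data_dir`` are only
        illustrative, and ``make_cache_table`` does not create the table
        itself)::

            import sqlalchemy as sa
            from beaker.ext.sqla import SqlaNamespaceManager, make_cache_table

            # make sure the module-level SQLAlchemy reference used by
            # make_cache_table has been initialized (it is a no-op if already done)
            SqlaNamespaceManager._init_dependencies()

            engine = sa.create_engine('sqlite:///beaker_cache.db')
            metadata = sa.MetaData()
            table = make_cache_table(metadata)
            metadata.create_all(engine)  # the table must be created explicitly
            manager = SqlaNamespaceManager('my_namespace', engine, table,
                                           data_dir='./cache')
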
44 | """ 45 | OpenResourceNamespaceManager.__init__(self, namespace) 46 | 47 | if lock_dir: 48 | self.lock_dir = lock_dir 49 | elif data_dir: 50 | self.lock_dir = data_dir + "/container_db_lock" 51 | if self.lock_dir: 52 | verify_directory(self.lock_dir) 53 | 54 | self.bind = self.__class__.binds.get(str(bind.url), lambda: bind) 55 | self.table = self.__class__.tables.get('%s:%s' % (bind.url, table.name), 56 | lambda: table) 57 | self.hash = {} 58 | self._is_new = False 59 | self.loaded = False 60 | 61 | def get_access_lock(self): 62 | return null_synchronizer() 63 | 64 | def get_creation_lock(self, key): 65 | return file_synchronizer( 66 | identifier="databasecontainer/funclock/%s" % self.namespace, 67 | lock_dir=self.lock_dir) 68 | 69 | def do_open(self, flags, replace): 70 | if self.loaded: 71 | self.flags = flags 72 | return 73 | select = sa.select(self.table.c.data).where(self.table.c.namespace == self.namespace) 74 | with self.bind.connect() as conn: 75 | result = conn.execute(select).fetchone() 76 | if not result: 77 | self._is_new = True 78 | self.hash = {} 79 | else: 80 | self._is_new = False 81 | try: 82 | self.hash = result.data 83 | except (IOError, OSError, EOFError, pickle.PickleError, 84 | pickle.PickleError): 85 | log.debug("Couln't load pickle data, creating new storage") 86 | self.hash = {} 87 | self._is_new = True 88 | self.flags = flags 89 | self.loaded = True 90 | 91 | def do_close(self): 92 | if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): 93 | with self.bind.begin() as conn: 94 | if self._is_new: 95 | insert = self.table.insert() 96 | conn.execute(insert, dict(namespace=self.namespace, data=self.hash, 97 | accessed=datetime.now(), created=datetime.now())) 98 | self._is_new = False 99 | else: 100 | update = self.table.update().where(self.table.c.namespace == self.namespace) 101 | conn.execute(update, dict(data=self.hash, accessed=datetime.now())) 102 | self.flags = None 103 | 104 | def do_remove(self): 105 | delete = self.table.delete().where(self.table.c.namespace == self.namespace) 106 | with self.bind.begin() as conn: 107 | conn.execute(delete) 108 | self.hash = {} 109 | self._is_new = True 110 | 111 | def __getitem__(self, key): 112 | return self.hash[key] 113 | 114 | def __contains__(self, key): 115 | return key in self.hash 116 | 117 | def __setitem__(self, key, value): 118 | self.hash[key] = value 119 | 120 | def __delitem__(self, key): 121 | del self.hash[key] 122 | 123 | def keys(self): 124 | return self.hash.keys() 125 | 126 | 127 | class SqlaContainer(Container): 128 | namespace_manager = SqlaNamespaceManager 129 | 130 | 131 | def make_cache_table(metadata, table_name='beaker_cache', schema_name=None): 132 | """Return a ``Table`` object suitable for storing cached values for the 133 | namespace manager. 
Do not create the table.""" 134 | return sa.Table(table_name, metadata, 135 | sa.Column('namespace', sa.String(255), primary_key=True), 136 | sa.Column('accessed', sa.DateTime, nullable=False), 137 | sa.Column('created', sa.DateTime, nullable=False), 138 | sa.Column('data', sa.PickleType, nullable=False), 139 | schema=schema_name if schema_name else metadata.schema) 140 | -------------------------------------------------------------------------------- /beaker/middleware.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | try: 4 | from paste.registry import StackedObjectProxy 5 | beaker_session = StackedObjectProxy(name="Beaker Session") 6 | beaker_cache = StackedObjectProxy(name="Cache Manager") 7 | except: 8 | beaker_cache = None 9 | beaker_session = None 10 | 11 | from beaker.cache import CacheManager 12 | from beaker.session import Session, SessionObject 13 | from beaker.util import coerce_cache_params, coerce_session_params, \ 14 | parse_cache_config_options 15 | 16 | 17 | class CacheMiddleware(object): 18 | cache = beaker_cache 19 | 20 | def __init__(self, app, config=None, environ_key='beaker.cache', **kwargs): 21 | """Initialize the Cache Middleware 22 | 23 | The Cache middleware will make a CacheManager instance available 24 | every request under the ``environ['beaker.cache']`` key by 25 | default. The location in environ can be changed by setting 26 | ``environ_key``. 27 | 28 | ``config`` 29 | dict All settings should be prefixed by 'cache.'. This 30 | method of passing variables is intended for Paste and other 31 | setups that accumulate multiple component settings in a 32 | single dictionary. If config contains *no cache. prefixed 33 | args*, then *all* of the config options will be used to 34 | initialize the Cache objects. 35 | 36 | ``environ_key`` 37 | Location where the Cache instance will keyed in the WSGI 38 | environ 39 | 40 | ``**kwargs`` 41 | All keyword arguments are assumed to be cache settings and 42 | will override any settings found in ``config`` 43 | 44 | """ 45 | self.app = app 46 | config = config or {} 47 | 48 | self.options = {} 49 | 50 | # Update the options with the parsed config 51 | self.options.update(parse_cache_config_options(config)) 52 | 53 | # Add any options from kwargs, but leave out the defaults this 54 | # time 55 | self.options.update( 56 | parse_cache_config_options(kwargs, include_defaults=False)) 57 | 58 | # Assume all keys are intended for cache if none are prefixed with 59 | # 'cache.' 60 | if not self.options and config: 61 | self.options = config 62 | 63 | self.options.update(kwargs) 64 | self.cache_manager = CacheManager(**self.options) 65 | self.environ_key = environ_key 66 | 67 | def __call__(self, environ, start_response): 68 | if environ.get('paste.registry'): 69 | if environ['paste.registry'].reglist: 70 | environ['paste.registry'].register(self.cache, 71 | self.cache_manager) 72 | environ[self.environ_key] = self.cache_manager 73 | return self.app(environ, start_response) 74 | 75 | 76 | class SessionMiddleware(object): 77 | session = beaker_session 78 | 79 | def __init__(self, wrap_app, config=None, environ_key='beaker.session', 80 | **kwargs): 81 | """Initialize the Session Middleware 82 | 83 | The Session middleware will make a lazy session instance 84 | available every request under the ``environ['beaker.session']`` 85 | key by default. The location in environ can be changed by 86 | setting ``environ_key``. 
87 | 88 | ``config`` 89 | dict All settings should be prefixed by 'session.'. This 90 | method of passing variables is intended for Paste and other 91 | setups that accumulate multiple component settings in a 92 | single dictionary. If config contains *no session. prefixed 93 | args*, then *all* of the config options will be used to 94 | initialize the Session objects. 95 | 96 | ``environ_key`` 97 | Location where the Session instance will keyed in the WSGI 98 | environ 99 | 100 | ``**kwargs`` 101 | All keyword arguments are assumed to be session settings and 102 | will override any settings found in ``config`` 103 | 104 | """ 105 | config = config or {} 106 | 107 | # Load up the default params 108 | self.options = dict(invalidate_corrupt=True, type=None, 109 | data_dir=None, key='beaker.session.id', 110 | timeout=None, save_accessed_time=True, secret=None, 111 | log_file=None) 112 | 113 | # Pull out any config args meant for beaker session. if there are any 114 | for dct in [config, kwargs]: 115 | for key, val in dct.items(): 116 | if key.startswith('beaker.session.'): 117 | self.options[key[15:]] = val 118 | if key.startswith('session.'): 119 | self.options[key[8:]] = val 120 | if key.startswith('session_'): 121 | warnings.warn('Session options should start with session. ' 122 | 'instead of session_.', DeprecationWarning, 2) 123 | self.options[key[8:]] = val 124 | 125 | # Coerce and validate session params 126 | coerce_session_params(self.options) 127 | 128 | # Assume all keys are intended for session if none are prefixed with 129 | # 'session.' 130 | if not self.options and config: 131 | self.options = config 132 | 133 | self.options.update(kwargs) 134 | self.wrap_app = self.app = wrap_app 135 | self.environ_key = environ_key 136 | 137 | def __call__(self, environ, start_response): 138 | session = SessionObject(environ, **self.options) 139 | if environ.get('paste.registry'): 140 | if environ['paste.registry'].reglist: 141 | environ['paste.registry'].register(self.session, session) 142 | environ[self.environ_key] = session 143 | environ['beaker.get_session'] = self._get_session 144 | 145 | if 'paste.testing_variables' in environ and 'webtest_varname' in self.options: 146 | environ['paste.testing_variables'][self.options['webtest_varname']] = session 147 | 148 | def session_start_response(status, headers, exc_info=None): 149 | if session.accessed(): 150 | session.persist() 151 | if session.__dict__['_headers']['set_cookie']: 152 | cookie = session.__dict__['_headers']['cookie_out'] 153 | if cookie: 154 | headers.append(('Set-cookie', cookie)) 155 | return start_response(status, headers, exc_info) 156 | return self.wrap_app(environ, session_start_response) 157 | 158 | def _get_session(self): 159 | return Session({}, use_cookies=False, **self.options) 160 | 161 | 162 | def session_filter_factory(global_conf, **kwargs): 163 | def filter(app): 164 | return SessionMiddleware(app, global_conf, **kwargs) 165 | return filter 166 | 167 | 168 | def session_filter_app_factory(app, global_conf, **kwargs): 169 | return SessionMiddleware(app, global_conf, **kwargs) 170 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | #[egg_info] 2 | #tag_build = dev 3 | #tag_svn_revision = false 4 | 5 | [tool:pytest] 6 | where=tests 7 | verbose=True 8 | detailed-errors=True 9 | with-doctest=True 10 | #with-coverage=True 11 | cover-package=beaker 12 | cover-inclusive=True 13 | 
ignore-files=annotated_functions.py 14 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import re 4 | import inspect 5 | 6 | from setuptools import setup, find_packages 7 | 8 | py_version = sys.version_info[:2] 9 | here = os.path.abspath(os.path.dirname(__file__)) 10 | v = open(os.path.join(here, 'beaker', '__init__.py')) 11 | VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1) 12 | v.close() 13 | 14 | try: 15 | README = open(os.path.join(here, 'README.rst')).read() 16 | except IOError: 17 | README = '' 18 | 19 | 20 | INSTALL_REQUIRES = [] 21 | if not hasattr(inspect, 'signature'): 22 | # On Python 2.6, 2.7 and 3.2 we need funcsigs dependency 23 | INSTALL_REQUIRES.append('funcsigs') 24 | 25 | 26 | TESTS_REQUIRE = ['pytest', 'pycryptodome'] 27 | 28 | if py_version == (2, 6): 29 | TESTS_REQUIRE.append('WebTest<2.0.24') 30 | TESTS_REQUIRE.append('pycparser==2.18') 31 | else: 32 | TESTS_REQUIRE.append('webtest') 33 | 34 | if py_version == (3, 2): 35 | TESTS_REQUIRE.append('coverage < 4.0') 36 | else: 37 | TESTS_REQUIRE.append('coverage') 38 | 39 | if py_version == (3, 3): 40 | TESTS_REQUIRE.append('cryptography < 2.1.0') 41 | else: 42 | TESTS_REQUIRE.append('cryptography') 43 | 44 | if not sys.platform.startswith('java') and not sys.platform == 'cli': 45 | if py_version == (2, 6): 46 | TESTS_REQUIRE.append('sqlalchemy < 1.2') 47 | else: 48 | TESTS_REQUIRE.append('sqlalchemy') 49 | TESTS_REQUIRE.extend(['pymongo', 'redis']) 50 | try: 51 | import sqlite3 52 | except ImportError: 53 | TESTS_REQUIRE.append('pysqlite') 54 | TESTS_REQUIRE.extend(['python-memcached']) 55 | 56 | 57 | setup(name='Beaker', 58 | version=VERSION, 59 | description="A Session and Caching library with WSGI Middleware", 60 | long_description=README, 61 | classifiers=[ 62 | 'Development Status :: 5 - Production/Stable', 63 | 'Environment :: Web Environment', 64 | 'Intended Audience :: Developers', 65 | 'License :: OSI Approved :: BSD License', 66 | 'Programming Language :: Python', 67 | 'Programming Language :: Python :: 3.8', 68 | 'Programming Language :: Python :: 3.9', 69 | 'Programming Language :: Python :: 3.10', 70 | 'Programming Language :: Python :: 3.11', 71 | 'Programming Language :: Python :: 3.12', 72 | 'Programming Language :: Python :: 3.13', 73 | 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 74 | 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 75 | 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 76 | 'Topic :: Internet :: WWW/HTTP :: WSGI', 77 | 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', 78 | ], 79 | keywords='wsgi myghty session web cache middleware', 80 | author='Ben Bangert, Mike Bayer, Philip Jenvey, Alessandro Molina', 81 | author_email='ben@groovie.org, pjenvey@groovie.org, amol@turbogears.org', 82 | url='https://beaker.readthedocs.io/', 83 | license='BSD', 84 | license_files=['LICENSE'], 85 | packages=find_packages(exclude=['ez_setup', 'examples', 'tests', 'tests.*']), 86 | zip_safe=False, 87 | install_requires=INSTALL_REQUIRES, 88 | extras_require={ 89 | 'crypto': ['pycryptopp>=0.5.12'], 90 | 'pycrypto': ['pycrypto'], 91 | 'pycryptodome': ['pycryptodome'], 92 | 'cryptography': ['cryptography'], 93 | 'testsuite': [TESTS_REQUIRE] 94 | }, 95 | test_suite='tests', 96 | tests_require=TESTS_REQUIRE, 97 | entry_points=""" 98 | [paste.filter_factory] 99 | beaker_session = 
beaker.middleware:session_filter_factory 100 | 101 | [paste.filter_app_factory] 102 | beaker_session = beaker.middleware:session_filter_app_factory 103 | 104 | [beaker.backends] 105 | database = beaker.ext.database:DatabaseNamespaceManager 106 | memcached = beaker.ext.memcached:MemcachedNamespaceManager 107 | google = beaker.ext.google:GoogleNamespaceManager 108 | sqla = beaker.ext.sqla:SqlaNamespaceManager 109 | """ 110 | ) 111 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bbangert/beaker/913d195875899b31fdbf8b4fda64094870e2d3d6/tests/__init__.py -------------------------------------------------------------------------------- /tests/annotated_functions.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """This is a collection of annotated functions used by tests. 3 | 4 | They are grouped here to provide an easy way to import them at runtime 5 | to check whenever tests for annotated functions should be skipped or not 6 | on current python version. 7 | """ 8 | from beaker.cache import cache_region 9 | import time 10 | 11 | class AnnotatedAlfredCacher(object): 12 | @cache_region('short_term') 13 | def alfred_self(self, xx: int, y=None) -> str: 14 | return str(time.time()) + str(self) + str(xx) + str(y) 15 | 16 | -------------------------------------------------------------------------------- /tests/test_cache.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from beaker._compat import u_, bytes_ 3 | 4 | import os 5 | import platform 6 | import shutil 7 | import tarfile 8 | import tempfile 9 | import time 10 | from beaker.middleware import CacheMiddleware 11 | from beaker import util 12 | from beaker.cache import Cache 13 | from unittest import SkipTest 14 | from beaker.util import skip_if 15 | import base64 16 | import zlib 17 | 18 | try: 19 | from webtest import TestApp as WebTestApp 20 | except ImportError: 21 | WebTestApp = None 22 | 23 | # Tarballs of the output of: 24 | # >>> from beaker.cache import Cache 25 | # >>> c = Cache('test', data_dir='db', type='dbm') 26 | # >>> c['foo'] = 'bar' 27 | # in the old format, Beaker @ revision: 24f57102d310 28 | dbm_cache_tar = bytes_("""\ 29 | eJzt3EtOwkAAgOEBjTHEBDfu2ekKZ6bTTnsBL+ABzPRB4osSRBMXHsNruXDl3nMYLaEbpYRAaIn6 30 | f8kwhFcn/APLSeNTUTdZsL4/m4Pg21wSqiCt9D1PC6mUZ7Xo+bWvrHB/N3HjXk+MrrLhQ/a48HXL 31 | nv+l0vg0yYcTdznMxhdpfFvHbpj1lyv0N8oq+jdhrr/b/A5Yo79R9G9ERX8XbXgLrNHfav7/G1Hd 32 | 30XGhYPMT5JYRbELVGISGVov9SKVRaGNQj2I49TrF+8oxpJrTAMHxizob+b7ay+Y/v5lE1/AP+8v 33 | 9o5ccdsWYvdViMPpIwdCtMRsiP3yTrucd8r5pJxbz8On9/KT2uVo3H5rG1cFAAAAAOD3aIuP7lv3 34 | pRjbXgkAAAAAAFjVyc1Idc6U1lYGgbSmL0Mjpe248+PYjY87I91x/UGeb3udAAAAAACgfh+fAAAA 35 | AADgr/t5/sPFTZ5cb/38D19Lzn9pRHX/zR4CtEZ/o+nfiEX9N3kI0Gr9vWl/W0z0BwAAAAAAAAAA 36 | AAAAAAAAqPAFyOvcKA== 37 | """) 38 | dbm_cache_tar = zlib.decompress(base64.b64decode(dbm_cache_tar)) 39 | 40 | # dumbdbm format 41 | dumbdbm_cache_tar = bytes_("""\ 42 | eJzt191qgzAYBmCPvYqc2UGx+ZKY6A3scCe7gJKoha6binOD3f2yn5Ouf3TTlNH3AQlEJcE3nyGV 43 | W0RT457Jsq9W6632W0Se0JI49/1E0vCIZZPPzHt5HmzPWNQ91M1r/XbwuVP3/6nKLcq2Gey6qftl 44 | 5Z6mWA3n56/IKOQfwk7+dvwV8Iv8FSH/IPbkb4uRl8BZ+fvg/WUE8g9if/62UDZf1VlZOiqc1VSq 45 | kudGVrKgushNkYuVc5VM/Rups5vjY3wErJU6nD+Z7fyFNFpEjIf4AFeef7Jq22TOZnzOpLiJLz0d 46 | 
CGyE+q/scHyMk/Wv+E79G0L9hzC7JSFMpv0PN0+J4rv7xNk+iTuKh07E6aXnB9Mao/7X/fExzt// 47 | FecS9R8C9v/r9rP+l49tubnk+e/z/J8JjvMfAAAAAAAAAADAn70DFJAAwQ== 48 | """) 49 | dumbdbm_cache_tar = zlib.decompress(base64.b64decode(dumbdbm_cache_tar)) 50 | 51 | def simple_app(environ, start_response): 52 | clear = False 53 | if environ.get('beaker.clear'): 54 | clear = True 55 | cache = environ['beaker.cache'].get_cache('testcache') 56 | if clear: 57 | cache.clear() 58 | try: 59 | value = cache.get_value('value') 60 | except: 61 | value = 0 62 | cache.set_value('value', value+1) 63 | start_response('200 OK', [('Content-type', 'text/plain')]) 64 | msg = 'The current value is: %s' % cache.get_value('value') 65 | return [msg.encode('utf-8')] 66 | 67 | def cache_manager_app(environ, start_response): 68 | cm = environ['beaker.cache'] 69 | cm.get_cache('test')['test_key'] = 'test value' 70 | 71 | start_response('200 OK', [('Content-type', 'text/plain')]) 72 | yield ("test_key is: %s\n" % cm.get_cache('test')['test_key']).encode('utf-8') 73 | cm.get_cache('test').clear() 74 | 75 | try: 76 | test_value = cm.get_cache('test')['test_key'] 77 | except KeyError: 78 | yield "test_key cleared".encode('utf-8') 79 | else: 80 | test_value = cm.get_cache('test')['test_key'] 81 | yield ("test_key wasn't cleared, is: %s\n" % test_value).encode('utf-8') 82 | 83 | def test_has_key(): 84 | cache = Cache('test', data_dir='./cache', type='dbm') 85 | o = object() 86 | cache.set_value("test", o) 87 | assert cache.has_key("test") 88 | assert "test" in cache 89 | assert not cache.has_key("foo") 90 | assert "foo" not in cache 91 | cache.remove_value("test") 92 | assert not cache.has_key("test") 93 | 94 | def test_expire_changes(): 95 | cache = Cache('test_bar', data_dir='./cache', type='dbm') 96 | cache.set_value('test', 10) 97 | assert cache.has_key('test') 98 | assert cache['test'] == 10 99 | 100 | # ensure that we can change a never-expiring value 101 | cache.set_value('test', 20, expiretime=1) 102 | assert cache.has_key('test') 103 | assert cache['test'] == 20 104 | time.sleep(1) 105 | assert not cache.has_key('test') 106 | 107 | # test that we can change it before its expired 108 | cache.set_value('test', 30, expiretime=50) 109 | assert cache.has_key('test') 110 | assert cache['test'] == 30 111 | 112 | cache.set_value('test', 40, expiretime=3) 113 | assert cache.has_key('test') 114 | assert cache['test'] == 40 115 | time.sleep(3) 116 | assert not cache.has_key('test') 117 | 118 | def test_fresh_createfunc(): 119 | cache = Cache('test_foo', data_dir='./cache', type='dbm') 120 | x = cache.get_value('test', createfunc=lambda: 10, expiretime=2) 121 | assert x == 10 122 | x = cache.get_value('test', createfunc=lambda: 12, expiretime=2) 123 | assert x == 10 124 | x = cache.get_value('test', createfunc=lambda: 14, expiretime=2) 125 | assert x == 10 126 | time.sleep(2) 127 | x = cache.get_value('test', createfunc=lambda: 16, expiretime=2) 128 | assert x == 16 129 | x = cache.get_value('test', createfunc=lambda: 18, expiretime=2) 130 | assert x == 16 131 | 132 | cache.remove_value('test') 133 | assert not cache.has_key('test') 134 | x = cache.get_value('test', createfunc=lambda: 20, expiretime=2) 135 | assert x == 20 136 | 137 | def test_has_key_multicache(): 138 | cache = Cache('test', data_dir='./cache', type='dbm') 139 | o = object() 140 | cache.set_value("test", o) 141 | assert cache.has_key("test") 142 | assert "test" in cache 143 | cache = Cache('test', data_dir='./cache', type='dbm') 144 | assert cache.has_key("test") 145 | 146 | def 
test_unicode_keys(): 147 | cache = Cache('test', data_dir='./cache', type='dbm') 148 | o = object() 149 | cache.set_value(u_('hiŏ'), o) 150 | assert u_('hiŏ') in cache 151 | assert u_('hŏa') not in cache 152 | cache.remove_value(u_('hiŏ')) 153 | assert u_('hiŏ') not in cache 154 | 155 | def test_remove_stale(): 156 | """test that remove_value() removes even if the value is expired.""" 157 | 158 | cache = Cache('test', type='memory') 159 | o = object() 160 | cache.namespace[b'key'] = (time.time() - 60, 5, o) 161 | container = cache._get_value('key') 162 | assert not container.has_current_value() 163 | assert b'key' in container.namespace 164 | cache.remove_value('key') 165 | assert b'key' not in container.namespace 166 | 167 | # safe to call again 168 | cache.remove_value('key') 169 | 170 | def test_multi_keys(): 171 | cache = Cache('newtests', data_dir='./cache', type='dbm') 172 | cache.clear() 173 | called = {} 174 | def create_func(): 175 | called['here'] = True 176 | return 'howdy' 177 | 178 | try: 179 | cache.get_value('key1') 180 | except KeyError: 181 | pass 182 | else: 183 | raise Exception("Failed to keyerror on nonexistent key") 184 | 185 | assert 'howdy' == cache.get_value('key2', createfunc=create_func) 186 | assert called['here'] == True 187 | del called['here'] 188 | 189 | try: 190 | cache.get_value('key3') 191 | except KeyError: 192 | pass 193 | else: 194 | raise Exception("Failed to keyerror on nonexistent key") 195 | try: 196 | cache.get_value('key1') 197 | except KeyError: 198 | pass 199 | else: 200 | raise Exception("Failed to keyerror on nonexistent key") 201 | 202 | assert 'howdy' == cache.get_value('key2', createfunc=create_func) 203 | assert called == {} 204 | 205 | @skip_if(lambda: WebTestApp is None, "webtest not installed") 206 | def test_increment(): 207 | app = WebTestApp(CacheMiddleware(simple_app)) 208 | res = app.get('/', extra_environ={'beaker.type':type, 'beaker.clear':True}) 209 | assert 'current value is: 1' in res 210 | res = app.get('/') 211 | assert 'current value is: 2' in res 212 | res = app.get('/') 213 | assert 'current value is: 3' in res 214 | 215 | @skip_if(lambda: WebTestApp is None, "webtest not installed") 216 | def test_cache_manager(): 217 | app = WebTestApp(CacheMiddleware(cache_manager_app)) 218 | res = app.get('/') 219 | assert 'test_key is: test value' in res 220 | assert 'test_key cleared' in res 221 | 222 | def test_clsmap_nonexistent(): 223 | from beaker.cache import clsmap 224 | 225 | try: 226 | clsmap['fake'] 227 | assert False 228 | except KeyError: 229 | pass 230 | 231 | def test_clsmap_present(): 232 | from beaker.cache import clsmap 233 | 234 | assert clsmap['memory'] 235 | 236 | 237 | def test_legacy_cache(): 238 | cache = Cache('newtests', data_dir='./cache', type='dbm') 239 | 240 | cache.set_value('x', '1') 241 | assert cache.get_value('x') == '1' 242 | 243 | cache.set_value('x', '2', type='file', data_dir='./cache') 244 | assert cache.get_value('x') == '1' 245 | assert cache.get_value('x', type='file', data_dir='./cache') == '2' 246 | 247 | cache.remove_value('x') 248 | cache.remove_value('x', type='file', data_dir='./cache') 249 | 250 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '5') == '5' 251 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '6', type='file', data_dir='./cache') == '6' 252 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '7') == '5' 253 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '8', type='file', data_dir='./cache') == '6' 254 | 
time.sleep(1) 255 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '9') == '9' 256 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '10', type='file', data_dir='./cache') == '10' 257 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '11') == '9' 258 | assert cache.get_value('x', expiretime=1, createfunc=lambda: '12', type='file', data_dir='./cache') == '10' 259 | 260 | 261 | def test_upgrade(): 262 | # If we're on OSX, lets run this since its OSX dump files, otherwise 263 | # we have to skip it 264 | if platform.system() != 'Darwin': 265 | return 266 | for test in _test_upgrade_has_key, _test_upgrade_in, _test_upgrade_setitem: 267 | for mod, tar in (('dbm', dbm_cache_tar), 268 | ('dumbdbm', dumbdbm_cache_tar)): 269 | try: 270 | __import__(mod) 271 | except ImportError: 272 | continue 273 | dir = tempfile.mkdtemp() 274 | fd, name = tempfile.mkstemp(dir=dir) 275 | fp = os.fdopen(fd, 'wb') 276 | fp.write(tar) 277 | fp.close() 278 | tar = tarfile.open(name) 279 | for member in tar.getmembers(): 280 | tar.extract(member, dir) 281 | tar.close() 282 | try: 283 | test(os.path.join(dir, 'db')) 284 | finally: 285 | shutil.rmtree(dir) 286 | 287 | def _test_upgrade_has_key(dir): 288 | cache = Cache('test', data_dir=dir, type='dbm') 289 | assert cache.has_key('foo') 290 | assert cache.has_key('foo') 291 | 292 | def _test_upgrade_in(dir): 293 | cache = Cache('test', data_dir=dir, type='dbm') 294 | assert 'foo' in cache 295 | assert 'foo' in cache 296 | 297 | def _test_upgrade_setitem(dir): 298 | cache = Cache('test', data_dir=dir, type='dbm') 299 | assert cache['foo'] == 'bar' 300 | assert cache['foo'] == 'bar' 301 | 302 | 303 | def teardown_module(): 304 | import shutil 305 | shutil.rmtree('./cache', True) 306 | -------------------------------------------------------------------------------- /tests/test_cache_decorator.py: -------------------------------------------------------------------------------- 1 | import time 2 | from datetime import datetime 3 | 4 | from beaker.cache import CacheManager, cache_region, region_invalidate 5 | from beaker import util 6 | from unittest import SkipTest 7 | 8 | defaults = {'cache.data_dir':'./cache', 'cache.type':'dbm', 'cache.expire': 2} 9 | 10 | def teardown_module(): 11 | import shutil 12 | shutil.rmtree('./cache', True) 13 | 14 | @cache_region('short_term') 15 | def fred(x): 16 | return time.time() 17 | 18 | @cache_region('short_term') 19 | def george(x): 20 | return time.time() 21 | 22 | @cache_region('short_term') 23 | def albert(x): 24 | """A doc string""" 25 | return time.time() 26 | 27 | @cache_region('short_term') 28 | def alfred(x, xx, y=None): 29 | return str(time.time()) + str(x) + str(xx) + str(y) 30 | 31 | class AlfredCacher(object): 32 | @cache_region('short_term') 33 | def alfred_self(self, xx, y=None): 34 | return str(time.time()) + str(self) + str(xx) + str(y) 35 | 36 | try: 37 | from .annotated_functions import AnnotatedAlfredCacher 38 | except (ImportError, SyntaxError): 39 | AnnotatedAlfredCacher = None 40 | 41 | 42 | def make_cache_obj(**kwargs): 43 | opts = defaults.copy() 44 | opts.update(kwargs) 45 | cache = CacheManager(**util.parse_cache_config_options(opts)) 46 | return cache 47 | 48 | def make_cached_func(**opts): 49 | cache = make_cache_obj(**opts) 50 | @cache.cache() 51 | def load(person): 52 | now = datetime.now() 53 | return "Hi there %s, its currently %s" % (person, now) 54 | return cache, load 55 | 56 | def make_region_cached_func(): 57 | opts = {} 58 | opts['cache.regions'] = 'short_term, 
long_term' 59 | opts['cache.short_term.expire'] = '2' 60 | cache = make_cache_obj(**opts) 61 | 62 | @cache_region('short_term', 'region_loader') 63 | def load(person): 64 | now = datetime.now() 65 | return "Hi there %s, its currently %s" % (person, now) 66 | return load 67 | 68 | def make_region_cached_func_2(): 69 | opts = {} 70 | opts['cache.regions'] = 'short_term, long_term' 71 | opts['cache.short_term.expire'] = '2' 72 | cache = make_cache_obj(**opts) 73 | 74 | @cache_region('short_term') 75 | def load_person(person): 76 | now = datetime.now() 77 | return "Hi there %s, its currently %s" % (person, now) 78 | return load_person 79 | 80 | def test_check_region_decorator(): 81 | func = make_region_cached_func() 82 | result = func('Fred') 83 | assert 'Fred' in result 84 | 85 | result2 = func('Fred') 86 | assert result == result2 87 | 88 | result3 = func('George') 89 | assert 'George' in result3 90 | result4 = func('George') 91 | assert result3 == result4 92 | 93 | time.sleep(2) # Now it should have expired as cache is 2secs 94 | result2 = func('Fred') 95 | assert result != result2 96 | 97 | def test_different_default_names(): 98 | result = fred(1) 99 | time.sleep(0.1) 100 | result2 = george(1) 101 | assert result != result2 102 | 103 | def test_check_invalidate_region(): 104 | func = make_region_cached_func() 105 | result = func('Fred') 106 | assert 'Fred' in result 107 | 108 | result2 = func('Fred') 109 | assert result == result2 110 | region_invalidate(func, None, 'region_loader', 'Fred') 111 | 112 | result3 = func('Fred') 113 | assert result3 != result2 114 | 115 | result2 = func('Fred') 116 | assert result3 == result2 117 | 118 | # Invalidate a non-existent key 119 | region_invalidate(func, None, 'region_loader', 'Fredd') 120 | assert result3 == result2 121 | 122 | 123 | def test_check_invalidate_region_2(): 124 | func = make_region_cached_func_2() 125 | result = func('Fred') 126 | assert 'Fred' in result 127 | 128 | result2 = func('Fred') 129 | assert result == result2 130 | region_invalidate(func, None, 'Fred') 131 | 132 | result3 = func('Fred') 133 | assert result3 != result2 134 | 135 | result2 = func('Fred') 136 | assert result3 == result2 137 | 138 | # Invalidate a non-existent key 139 | region_invalidate(func, None, 'Fredd') 140 | assert result3 == result2 141 | 142 | def test_invalidate_cache(): 143 | cache, func = make_cached_func() 144 | val = func('foo') 145 | time.sleep(0.1) 146 | val2 = func('foo') 147 | assert val == val2 148 | 149 | cache.invalidate(func, 'foo') 150 | val3 = func('foo') 151 | assert val3 != val 152 | 153 | def test_class_key_cache(): 154 | cache = make_cache_obj() 155 | 156 | class Foo(object): 157 | @cache.cache('method') 158 | def go(self, x, y): 159 | return "hi foo" 160 | 161 | @cache.cache('standalone') 162 | def go(x, y): 163 | return "hi standalone" 164 | 165 | x = Foo().go(1, 2) 166 | y = go(1, 2) 167 | 168 | ns = go._arg_namespace 169 | assert cache.get_cache(ns).get('method 1 2') == x 170 | assert cache.get_cache(ns).get('standalone 1 2') == y 171 | 172 | def test_func_namespace(): 173 | def go(x, y): 174 | return "hi standalone" 175 | 176 | assert 'test_cache_decorator' in util.func_namespace(go) 177 | assert util.func_namespace(go).endswith('go') 178 | 179 | def test_class_key_region(): 180 | opts = {} 181 | opts['cache.regions'] = 'short_term' 182 | opts['cache.short_term.expire'] = '2' 183 | cache = make_cache_obj(**opts) 184 | 185 | class Foo(object): 186 | @cache_region('short_term', 'method') 187 | def go(self, x, y): 188 | return "hi 
foo" 189 | 190 | @cache_region('short_term', 'standalone') 191 | def go(x, y): 192 | return "hi standalone" 193 | 194 | x = Foo().go(1, 2) 195 | y = go(1, 2) 196 | ns = go._arg_namespace 197 | assert cache.get_cache_region(ns, 'short_term').get('method 1 2') == x 198 | assert cache.get_cache_region(ns, 'short_term').get('standalone 1 2') == y 199 | 200 | def test_classmethod_key_region(): 201 | opts = {} 202 | opts['cache.regions'] = 'short_term' 203 | opts['cache.short_term.expire'] = '2' 204 | cache = make_cache_obj(**opts) 205 | 206 | class Foo(object): 207 | @classmethod 208 | @cache_region('short_term', 'method') 209 | def go(cls, x, y): 210 | return "hi" 211 | 212 | x = Foo.go(1, 2) 213 | ns = Foo.go._arg_namespace 214 | assert cache.get_cache_region(ns, 'short_term').get('method 1 2') == x 215 | 216 | def test_class_key_region_invalidate(): 217 | opts = {} 218 | opts['cache.regions'] = 'short_term' 219 | opts['cache.short_term.expire'] = '2' 220 | cache = make_cache_obj(**opts) 221 | 222 | class Foo(object): 223 | @cache_region('short_term', 'method') 224 | def go(self, x, y): 225 | now = datetime.now() 226 | return "hi %s" % now 227 | 228 | def invalidate(self, x, y): 229 | region_invalidate(self.go, None, "method", x, y) 230 | 231 | x = Foo().go(1, 2) 232 | time.sleep(0.1) 233 | y = Foo().go(1, 2) 234 | Foo().invalidate(1, 2) 235 | z = Foo().go(1, 2) 236 | 237 | assert x == y 238 | assert x != z 239 | 240 | def test_check_region_decorator_keeps_docstring_and_name(): 241 | result = albert(1) 242 | time.sleep(0.1) 243 | result2 = albert(1) 244 | assert result == result2 245 | 246 | assert albert.__doc__ == "A doc string" 247 | assert albert.__name__ == "albert" 248 | 249 | 250 | def test_check_region_decorator_with_kwargs(): 251 | result = alfred(1, xx=5, y=3) 252 | time.sleep(0.1) 253 | 254 | result2 = alfred(1, y=3, xx=5) 255 | assert result == result2 256 | 257 | result3 = alfred(1, 5, y=5) 258 | assert result != result3 259 | 260 | result4 = alfred(1, 5, 3) 261 | assert result == result4 262 | 263 | result5 = alfred(1, 5, y=3) 264 | assert result == result5 265 | 266 | 267 | def test_check_region_decorator_with_kwargs_and_self(): 268 | a1 = AlfredCacher() 269 | a2 = AlfredCacher() 270 | 271 | result = a1.alfred_self(xx=5, y='blah') 272 | time.sleep(0.1) 273 | 274 | result2 = a2.alfred_self(y='blah', xx=5) 275 | assert result == result2 276 | 277 | result3 = a2.alfred_self(5, y=5) 278 | assert result != result3 279 | 280 | result4 = a2.alfred_self(5, 'blah') 281 | assert result == result4 282 | 283 | result5 = a2.alfred_self(5, y='blah') 284 | assert result == result5 285 | 286 | result6 = a2.alfred_self(6, 'blah') 287 | assert result != result6 288 | 289 | 290 | def test_check_region_decorator_with_kwargs_self_and_annotations(): 291 | if AnnotatedAlfredCacher is None: 292 | raise SkipTest('Python version not supporting annotations') 293 | 294 | a1 = AnnotatedAlfredCacher() 295 | a2 = AnnotatedAlfredCacher() 296 | 297 | result = a1.alfred_self(xx=5, y='blah') 298 | time.sleep(0.1) 299 | 300 | result2 = a2.alfred_self(y='blah', xx=5) 301 | assert result == result2 302 | 303 | result3 = a2.alfred_self(5, y=5) 304 | assert result != result3 305 | 306 | result4 = a2.alfred_self(5, 'blah') 307 | assert result == result4 308 | 309 | result5 = a2.alfred_self(5, y='blah') 310 | assert result == result5 311 | 312 | result6 = a2.alfred_self(6, 'blah') 313 | assert result != result6 314 | -------------------------------------------------------------------------------- 
/tests/test_cachemanager.py: -------------------------------------------------------------------------------- 1 | import time 2 | from datetime import datetime 3 | 4 | import shutil 5 | 6 | from beaker.cache import CacheManager, cache_regions 7 | from beaker.util import parse_cache_config_options 8 | 9 | defaults = {'cache.data_dir':'./cache', 'cache.type':'dbm', 'cache.expire': 2} 10 | 11 | def teardown_module(): 12 | import shutil 13 | shutil.rmtree('./cache', True) 14 | 15 | def make_cache_obj(**kwargs): 16 | opts = defaults.copy() 17 | opts.update(kwargs) 18 | cache = CacheManager(**parse_cache_config_options(opts)) 19 | return cache 20 | 21 | def make_region_cached_func(): 22 | global _cache_obj 23 | opts = {} 24 | opts['cache.regions'] = 'short_term, long_term' 25 | opts['cache.short_term.expire'] = '2' 26 | cache = make_cache_obj(**opts) 27 | 28 | @cache.region('short_term', 'region_loader') 29 | def load(person): 30 | now = datetime.now() 31 | return "Hi there %s, its currently %s" % (person, now) 32 | _cache_obj = cache 33 | return load 34 | 35 | def make_cached_func(): 36 | global _cache_obj 37 | cache = make_cache_obj() 38 | 39 | @cache.cache('loader') 40 | def load(person): 41 | now = datetime.now() 42 | return "Hi there %s, its currently %s" % (person, now) 43 | _cache_obj = cache 44 | return load 45 | 46 | def test_parse_doesnt_allow_none(): 47 | opts = {} 48 | opts['cache.regions'] = 'short_term, long_term' 49 | for region, params in parse_cache_config_options(opts)['cache_regions'].items(): 50 | for k, v in params.items(): 51 | assert v != 'None', k 52 | 53 | def test_parse_doesnt_allow_empty_region_name(): 54 | opts = {} 55 | opts['cache.regions'] = '' 56 | regions = parse_cache_config_options(opts)['cache_regions'] 57 | assert len(regions) == 0 58 | 59 | def test_decorators(): 60 | for func in (make_region_cached_func, make_cached_func): 61 | check_decorator(func()) 62 | 63 | def check_decorator(func): 64 | result = func('Fred') 65 | assert 'Fred' in result 66 | 67 | result2 = func('Fred') 68 | assert result == result2 69 | 70 | result3 = func('George') 71 | assert 'George' in result3 72 | result4 = func('George') 73 | assert result3 == result4 74 | 75 | time.sleep(2) 76 | result2 = func('Fred') 77 | assert result != result2 78 | 79 | def test_check_invalidate_region(): 80 | func = make_region_cached_func() 81 | result = func('Fred') 82 | assert 'Fred' in result 83 | 84 | result2 = func('Fred') 85 | assert result == result2 86 | _cache_obj.region_invalidate(func, None, 'region_loader', 'Fred') 87 | 88 | result3 = func('Fred') 89 | assert result3 != result2 90 | 91 | result2 = func('Fred') 92 | assert result3 == result2 93 | 94 | # Invalidate a non-existent key 95 | _cache_obj.region_invalidate(func, None, 'region_loader', 'Fredd') 96 | assert result3 == result2 97 | 98 | def test_check_invalidate(): 99 | func = make_cached_func() 100 | result = func('Fred') 101 | assert 'Fred' in result 102 | 103 | result2 = func('Fred') 104 | assert result == result2 105 | _cache_obj.invalidate(func, 'loader', 'Fred') 106 | 107 | result3 = func('Fred') 108 | assert result3 != result2 109 | 110 | result2 = func('Fred') 111 | assert result3 == result2 112 | 113 | # Invalidate a non-existent key 114 | _cache_obj.invalidate(func, 'loader', 'Fredd') 115 | assert result3 == result2 116 | 117 | def test_long_name(): 118 | func = make_cached_func() 119 | name = 'Fred' * 250 120 | result = func(name) 121 | assert name in result 122 | 123 | result2 = func(name) 124 | assert result == result2 125 | 
# This won't actually invalidate it since the key won't be sha'd 126 | _cache_obj.invalidate(func, 'loader', name, key_length=8000) 127 | 128 | result3 = func(name) 129 | assert result3 == result2 130 | 131 | # And now this should invalidate it 132 | _cache_obj.invalidate(func, 'loader', name) 133 | result4 = func(name) 134 | assert result3 != result4 135 | 136 | 137 | def test_cache_region_has_default_key_length(): 138 | try: 139 | cache = CacheManager(cache_regions={ 140 | 'short_term_without_key_length':{ 141 | 'expire': 60, 142 | 'type': 'memory' 143 | } 144 | }) 145 | 146 | # Check CacheManager registered the region in global regions 147 | assert 'short_term_without_key_length' in cache_regions 148 | 149 | @cache.region('short_term_without_key_length') 150 | def load_without_key_length(person): 151 | now = datetime.now() 152 | return "Hi there %s, its currently %s" % (person, now) 153 | 154 | # Ensure that same person gets same time 155 | msg = load_without_key_length('fred') 156 | msg2 = load_without_key_length('fred') 157 | assert msg == msg2, (msg, msg2) 158 | 159 | # Ensure that different person gets different time 160 | msg3 = load_without_key_length('george') 161 | assert msg3.split(',')[-1] != msg2.split(',')[-1] 162 | 163 | finally: 164 | # throw away region for this test 165 | cache_regions.pop('short_term_without_key_length', None) 166 | 167 | 168 | def test_cache_region_expire_is_always_int(): 169 | try: 170 | cache = CacheManager(cache_regions={ 171 | 'short_term_with_string_expire': { 172 | 'expire': '60', 173 | 'type': 'memory' 174 | } 175 | }) 176 | 177 | # Check CacheManager registered the region in global regions 178 | assert 'short_term_with_string_expire' in cache_regions 179 | 180 | @cache.region('short_term_with_string_expire') 181 | def load_with_str_expire(person): 182 | now = datetime.now() 183 | return "Hi there %s, its currently %s" % (person, now) 184 | 185 | # Ensure that same person gets same time 186 | msg = load_with_str_expire('fred') 187 | msg2 = load_with_str_expire('fred') 188 | assert msg == msg2, (msg, msg2) 189 | 190 | finally: 191 | # throw away region for this test 192 | cache_regions.pop('short_term_with_string_expire', None) 193 | 194 | 195 | def test_directory_goes_away(): 196 | cache = CacheManager(cache_regions={ 197 | 'short_term_without_key_length':{ 198 | 'expire': 60, 199 | 'type': 'dbm', 200 | 'data_dir': '/tmp/beaker-tests/cache/data', 201 | 'lock_dir': '/tmp/beaker-tests/cache/lock' 202 | } 203 | }) 204 | 205 | 206 | @cache.region('short_term_without_key_length') 207 | def load_with_str_expire(person): 208 | now = datetime.now() 209 | return "Hi there %s, its currently %s" % (person, now) 210 | 211 | 212 | # Ensure that same person gets same time 213 | msg = load_with_str_expire('fred') 214 | msg2 = load_with_str_expire('fred') 215 | 216 | shutil.rmtree('/tmp/beaker-tests') 217 | 218 | msg3 = load_with_str_expire('fred') 219 | assert msg == msg2, (msg, msg2) 220 | assert msg2 != msg3, (msg2, msg3) 221 | -------------------------------------------------------------------------------- /tests/test_container.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | import random 4 | import shutil 5 | import sys 6 | import time 7 | import pytest 8 | 9 | from beaker.container import * 10 | from beaker.synchronization import _synchronizers 11 | from beaker.cache import clsmap 12 | from threading import Thread 13 | 14 | class CachedWidget(object): 15 | totalcreates = 0 16 | delay 
= 0 17 | 18 | def __init__(self): 19 | CachedWidget.totalcreates += 1 20 | time.sleep(CachedWidget.delay) 21 | self.time = time.time() 22 | 23 | def _run_container_test(cls, totaltime, expiretime, delay, threadlocal): 24 | print("\ntesting %s for %d secs with expiretime %s delay %d" % ( 25 | cls, totaltime, expiretime, delay)) 26 | 27 | CachedWidget.totalcreates = 0 28 | CachedWidget.delay = delay 29 | 30 | # allow for python overhead when checking current time against expire times 31 | fudge = 10 32 | 33 | starttime = time.time() 34 | 35 | running = [True] 36 | class RunThread(Thread): 37 | def run(self): 38 | print("%s starting" % self) 39 | 40 | if threadlocal: 41 | localvalue = Value( 42 | 'test', 43 | cls('test', data_dir='./cache'), 44 | createfunc=CachedWidget, 45 | expiretime=expiretime, 46 | starttime=starttime) 47 | localvalue.clear_value() 48 | else: 49 | localvalue = value 50 | 51 | try: 52 | while running[0]: 53 | item = localvalue.get_value() 54 | if expiretime is not None: 55 | currenttime = time.time() 56 | itemtime = item.time 57 | assert itemtime + expiretime + delay + fudge >= currenttime, \ 58 | "created: %f expire: %f delay: %f currenttime: %f" % \ 59 | (itemtime, expiretime, delay, currenttime) 60 | time.sleep(random.random() * .00001) 61 | except: 62 | running[0] = False 63 | raise 64 | print("%s finishing" % self) 65 | 66 | if not threadlocal: 67 | value = Value( 68 | 'test', 69 | cls('test', data_dir='./cache'), 70 | createfunc=CachedWidget, 71 | expiretime=expiretime, 72 | starttime=starttime) 73 | value.clear_value() 74 | else: 75 | value = None 76 | 77 | threads = [RunThread() for i in range(1, 8)] 78 | 79 | for t in threads: 80 | t.start() 81 | 82 | time.sleep(totaltime) 83 | 84 | failed = not running[0] 85 | running[0] = False 86 | 87 | for t in threads: 88 | t.join() 89 | 90 | assert not failed, "One or more threads failed" 91 | if expiretime is None: 92 | expected = 1 93 | else: 94 | expected = totaltime / expiretime + 1 95 | assert CachedWidget.totalcreates <= expected, \ 96 | "Number of creates %d exceeds expected max %d" % (CachedWidget.totalcreates, expected) 97 | 98 | def test_memory_container(totaltime=10, expiretime=None, delay=0, threadlocal=False): 99 | _run_container_test(clsmap['memory'], 100 | totaltime, expiretime, delay, threadlocal) 101 | 102 | def test_dbm_container(totaltime=10, expiretime=None, delay=0): 103 | _run_container_test(clsmap['dbm'], totaltime, expiretime, delay, False) 104 | 105 | def test_file_container(totaltime=10, expiretime=None, delay=0, threadlocal=False): 106 | _run_container_test(clsmap['file'], totaltime, expiretime, delay, threadlocal) 107 | 108 | def test_memory_container_tlocal(): 109 | test_memory_container(expiretime=15, delay=2, threadlocal=True) 110 | 111 | def test_memory_container_2(): 112 | test_memory_container(expiretime=12) 113 | 114 | def test_memory_container_3(): 115 | test_memory_container(expiretime=15, delay=2) 116 | 117 | def test_dbm_container_2(): 118 | test_dbm_container(expiretime=12) 119 | 120 | def test_dbm_container_3(): 121 | test_dbm_container(expiretime=15, delay=2) 122 | 123 | def test_file_container_2(): 124 | test_file_container(expiretime=12) 125 | 126 | def test_file_container_3(): 127 | test_file_container(expiretime=15, delay=2) 128 | 129 | def test_file_container_tlocal(): 130 | test_file_container(expiretime=15, delay=2, threadlocal=True) 131 | 132 | 133 | @pytest.mark.skipif(sys.version_info < (3, 6), 134 | reason="Cryptography not supported on Python 3 lower than 3.6") 135 | def 
test_file_open_bug(): 136 | """ensure errors raised during reads or writes don't lock the namespace open.""" 137 | 138 | value = Value('test', clsmap['file']('reentrant_test', data_dir='./cache')) 139 | if os.path.exists(value.namespace.file): 140 | os.remove(value.namespace.file) 141 | 142 | value.set_value("x") 143 | 144 | f = open(value.namespace.file, 'w') 145 | f.write("BLAH BLAH BLAH") 146 | f.close() 147 | 148 | with pytest.raises(pickle.UnpicklingError): 149 | value.set_value("y") 150 | 151 | _synchronizers.clear() 152 | 153 | value = Value('test', clsmap['file']('reentrant_test', data_dir='./cache')) 154 | 155 | # TODO: do we have an assertRaises() in nose to use here ? 156 | with pytest.raises(pickle.UnpicklingError): 157 | value.set_value("z") 158 | 159 | 160 | def test_removing_file_refreshes(): 161 | """test that the cache doesn't ignore file removals""" 162 | 163 | x = [0] 164 | 165 | def create(): 166 | x[0] += 1 167 | return x[0] 168 | 169 | value = Value('test', 170 | clsmap['file']('refresh_test', data_dir='./cache'), 171 | createfunc=create, starttime=time.time() 172 | ) 173 | if os.path.exists(value.namespace.file): 174 | os.remove(value.namespace.file) 175 | assert value.get_value() == 1 176 | assert value.get_value() == 1 177 | os.remove(value.namespace.file) 178 | assert value.get_value() == 2 179 | 180 | 181 | def teardown_module(): 182 | shutil.rmtree('./cache', True) 183 | -------------------------------------------------------------------------------- /tests/test_converters.py: -------------------------------------------------------------------------------- 1 | from beaker._compat import u_ 2 | import unittest 3 | 4 | from beaker.converters import asbool, aslist 5 | 6 | 7 | class AsBool(unittest.TestCase): 8 | def test_truth_str(self): 9 | for v in ('true', 'yes', 'on', 'y', 't', '1'): 10 | self.assertTrue(asbool(v), "%s should be considered True" % (v,)) 11 | v = v.upper() 12 | self.assertTrue(asbool(v), "%s should be considered True" % (v,)) 13 | 14 | def test_false_str(self): 15 | for v in ('false', 'no', 'off', 'n', 'f', '0'): 16 | self.assertFalse(asbool(v), v) 17 | v = v.upper() 18 | self.assertFalse(asbool(v), v) 19 | 20 | def test_coerce(self): 21 | """Things that can coerce right straight to booleans.""" 22 | self.assertTrue(asbool(True)) 23 | self.assertTrue(asbool(1)) 24 | self.assertTrue(asbool(42)) 25 | self.assertFalse(asbool(False)) 26 | self.assertFalse(asbool(0)) 27 | 28 | def test_bad_values(self): 29 | self.assertRaises(ValueError, asbool, ('mommy!')) 30 | self.assertRaises(ValueError, asbool, (u_('Blargl?'))) 31 | 32 | 33 | class AsList(unittest.TestCase): 34 | def test_string(self): 35 | self.assertEqual(aslist('abc'), ['abc']) 36 | self.assertEqual(aslist('1a2a3', 'a'), ['1', '2', '3']) 37 | 38 | def test_None(self): 39 | self.assertEqual(aslist(None), []) 40 | 41 | def test_listy_noops(self): 42 | """Lists and tuples should come back unchanged.""" 43 | x = [1, 2, 3] 44 | self.assertEqual(aslist(x), x) 45 | y = ('z', 'y', 'x') 46 | self.assertEqual(aslist(y), y) 47 | 48 | def test_listify(self): 49 | """Other objects should just result in a single item list.""" 50 | self.assertEqual(aslist(dict()), [{}]) 51 | 52 | 53 | if __name__ == '__main__': 54 | unittest.main() 55 | 56 | -------------------------------------------------------------------------------- /tests/test_cookie_domain_only.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from beaker.middleware import SessionMiddleware 
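# NOTE: the bare ``pytest.mark.skipif(...)`` expression a few lines below is
# never assigned (e.g. to ``pytestmark``), so by itself it does not skip this
# module; the ``pytest.importorskip("webtest")`` call is the guard that
# actually takes effect when webtest is missing.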
4 | from beaker import crypto 5 | 6 | webtest = pytest.importorskip("webtest") 7 | 8 | pytest.mark.skipif(not crypto.get_crypto_module('default').has_aes, 9 | reason="No AES library is installed, can't test " + 10 | "cookie-only Sessions") 11 | 12 | 13 | def simple_app(environ, start_response): 14 | session = environ['beaker.session'] 15 | if 'value' not in session: 16 | session['value'] = 0 17 | session['value'] += 1 18 | domain = environ.get('domain') 19 | if domain: 20 | session.domain = domain 21 | if not environ['PATH_INFO'].startswith('/nosave'): 22 | session.save() 23 | start_response('200 OK', [('Content-type', 'text/plain')]) 24 | msg = 'The current value is: %d and cookie is %s' % (session['value'], session) 25 | return [msg.encode('utf-8')] 26 | 27 | 28 | def test_increment(): 29 | options = {'session.validate_key':'hoobermas', 30 | 'session.type':'cookie'} 31 | app = webtest.TestApp(SessionMiddleware(simple_app, **options)) 32 | res = app.get('/') 33 | assert 'current value is: 1' in res 34 | 35 | res = app.get('/', extra_environ=dict(domain='.hoop.com', 36 | HTTP_HOST='www.hoop.com')) 37 | assert 'current value is: 1' in res 38 | assert 'Domain=.hoop.com' in res.headers['Set-Cookie'] 39 | 40 | res = app.get('/', extra_environ=dict(HTTP_HOST='www.hoop.com')) 41 | assert 'Domain=.hoop.com' in res.headers['Set-Cookie'] 42 | assert 'current value is: 2' in res 43 | 44 | 45 | def test_cookie_attributes_are_preserved(): 46 | options = {'session.type': 'memory', 47 | 'session.httponly': True, 48 | 'session.secure': True, 49 | 'session.cookie_path': '/app', 50 | 'session.cookie_domain': 'localhost'} 51 | app = webtest.TestApp(SessionMiddleware(simple_app, **options)) 52 | res = app.get('/app', extra_environ=dict( 53 | HTTP_COOKIE='beaker.session.id=oldsessid', domain='.hoop.com')) 54 | cookie = res.headers['Set-Cookie'] 55 | assert 'domain=.hoop.com' in cookie.lower() 56 | assert 'path=/app' in cookie.lower() 57 | assert 'secure' in cookie.lower() 58 | assert 'httponly' in cookie.lower() 59 | assert 'samesite=lax' in cookie.lower() 60 | 61 | 62 | if __name__ == '__main__': 63 | from paste import httpserver 64 | wsgi_app = SessionMiddleware(simple_app, {}) 65 | httpserver.serve(wsgi_app, host='127.0.0.1', port=8080) 66 | -------------------------------------------------------------------------------- /tests/test_cookie_expires.py: -------------------------------------------------------------------------------- 1 | from beaker.middleware import SessionMiddleware 2 | from beaker.session import Session, CookieSession 3 | import datetime 4 | import re 5 | 6 | def test_cookie_expires(): 7 | """Explore valid arguments for cookie_expires.""" 8 | def app(*args, **kw): 9 | pass 10 | 11 | key = 'beaker.session.cookie_expires' 12 | now = datetime.datetime.now() 13 | 14 | values = ['300', 300, 15 | True, 'True', 'true', 't', 16 | False, 'False', 'false', 'f', 17 | datetime.timedelta(minutes=5), now] 18 | 19 | expected = [datetime.timedelta(seconds=300), 20 | datetime.timedelta(seconds=300), 21 | True, True, True, True, 22 | False, False, False, False, 23 | datetime.timedelta(minutes=5), now] 24 | 25 | actual = [] 26 | 27 | for pos, v in enumerate(values): 28 | try: 29 | s = SessionMiddleware(app, config={key:v}) 30 | val = s.options['cookie_expires'] 31 | except: 32 | val = None 33 | assert val == expected[pos] 34 | 35 | 36 | def cookie_expiration(session): 37 | cookie = session.cookie.output() 38 | expiry_m = re.match('Set-Cookie: beaker.session.id=[0-9a-f]{32}(; expires=[^;]+)?; Path=/', cookie) 
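    # The optional group captured above is the '; expires=...' attribute: if it
    # is absent the cookie is a browser-session cookie (cookie_expires=True) and
    # the helper returns True; if it holds the far-future Jan-2038 date that
    # Beaker emits for cookie_expires=False it returns False; otherwise the
    # expiry date string itself is returned.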
39 | assert expiry_m 40 | expiry = expiry_m.group(1) 41 | if expiry is None: 42 | return True 43 | if re.match('; expires=(Mon|Tue), 1[89]-Jan-2038 [0-9:]{8} GMT', expiry): 44 | return False 45 | else: 46 | return expiry[10:] 47 | 48 | 49 | def test_cookie_exprires_2(): 50 | """Exhibit Set-Cookie: values.""" 51 | expires = cookie_expiration(Session({}, cookie_expires=True)) 52 | 53 | assert expires is True, expires 54 | no_expires = cookie_expiration(Session({}, cookie_expires=False)) 55 | 56 | assert no_expires is False, no_expires 57 | 58 | def test_cookie_expires_different_locale(): 59 | from locale import setlocale, LC_TIME 60 | expires_date = datetime.datetime(2019, 5, 22) 61 | setlocale(LC_TIME, 'it_IT.UTF-8') 62 | # if you get locale.Error: unsupported locale setting. you have to enable that locale in your OS. 63 | assert expires_date.strftime("%a, %d-%b-%Y %H:%M:%S GMT").startswith('mer,') 64 | session = Session({}, cookie_expires=True, validate_key='validate_key') 65 | assert session._set_cookie_expires(expires_date) 66 | expires = cookie_expiration(session) 67 | assert expires == 'Wed, 22-May-2019 00:00:00 GMT', expires 68 | setlocale(LC_TIME, '') # restore default locale for further tests 69 | 70 | def test_set_cookie_expires(): 71 | """Exhibit Set-Cookie: values.""" 72 | session = Session({}, cookie_expires=True) 73 | assert cookie_expiration(session) is True 74 | session._set_cookie_expires(False) 75 | assert cookie_expiration(session) is False 76 | session._set_cookie_expires(True) 77 | assert cookie_expiration(session) is True 78 | 79 | def test_cookiesession_expires_values(): 80 | BASE_OPTIONS = { 81 | 'invalidate_corrupt': True, 82 | 'type': 'cookie', 83 | 'data_dir': None, 84 | 'key': 'ckan', 85 | 'timeout': None, 86 | 'save_accessed_time': True, 87 | 'secret': '12341234', 88 | 'log_file': None, 89 | 'data_serializer': 'json', 90 | 'validate_key': 'asdfasdf', 91 | 'httponly': True, 92 | 'secure': False, 93 | 'samesite': 'Strict', 94 | 'auto': False, 95 | 'cookie_domain': None 96 | } 97 | 98 | for cookie_expires in (True, False, datetime.timedelta(seconds=10)): 99 | options = dict(BASE_OPTIONS, cookie_expires=cookie_expires) 100 | cookie_session = CookieSession( 101 | {}, 102 | **options, 103 | ) 104 | # Check the cookie has expire 105 | # True = expire when browser is closed, so no Expire= 106 | cookie_session.save() 107 | if cookie_expires != True: 108 | assert "expires" in cookie_session.cookie.output() 109 | 110 | # Check we can save it again. 111 | cookie_session.save() 112 | if cookie_expires != True: 113 | assert "expires" in cookie_session.cookie.output() 114 | 115 | # Check we can load it back. 
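        # The session is rebuilt from the serialized cookie; note that the
        # assertion below still inspects cookie_session.cookie rather than
        # loaded_back.cookie.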
116 | loaded_back = CookieSession({"cookie": cookie_session.cookie}, **options) 117 | loaded_back.save() 118 | if cookie_expires != True: 119 | assert "expires" in cookie_session.cookie.output() 120 | -------------------------------------------------------------------------------- /tests/test_database.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from beaker._compat import u_ 3 | 4 | from beaker.cache import clsmap, Cache, util 5 | from beaker.exceptions import InvalidCacheBackendError 6 | from beaker.middleware import CacheMiddleware 7 | from unittest import SkipTest 8 | 9 | try: 10 | from webtest import TestApp as WebTestApp 11 | except ImportError: 12 | WebTestApp = None 13 | 14 | 15 | try: 16 | clsmap['ext:database']._init_dependencies() 17 | except InvalidCacheBackendError: 18 | raise SkipTest("an appropriate SQLAlchemy backend is not installed") 19 | 20 | db_url = 'sqlite:///test.db' 21 | 22 | def simple_app(environ, start_response): 23 | extra_args = {} 24 | clear = False 25 | if environ.get('beaker.clear'): 26 | clear = True 27 | extra_args['type'] = 'ext:database' 28 | extra_args['url'] = db_url 29 | extra_args['data_dir'] = './cache' 30 | cache = environ['beaker.cache'].get_cache('testcache', **extra_args) 31 | if clear: 32 | cache.clear() 33 | try: 34 | value = cache.get_value('value') 35 | except: 36 | value = 0 37 | cache.set_value('value', value+1) 38 | start_response('200 OK', [('Content-type', 'text/plain')]) 39 | return [('The current value is: %s' % cache.get_value('value')).encode('utf-8')] 40 | 41 | def cache_manager_app(environ, start_response): 42 | cm = environ['beaker.cache'] 43 | cm.get_cache('test')['test_key'] = 'test value' 44 | 45 | start_response('200 OK', [('Content-type', 'text/plain')]) 46 | yield ("test_key is: %s\n" % cm.get_cache('test')['test_key']).encode('utf-8') 47 | cm.get_cache('test').clear() 48 | 49 | try: 50 | test_value = cm.get_cache('test')['test_key'] 51 | except KeyError: 52 | yield ("test_key cleared").encode('utf-8') 53 | else: 54 | yield ("test_key wasn't cleared, is: %s\n" % test_value).encode('utf-8') 55 | 56 | def test_has_key(): 57 | cache = Cache('test', data_dir='./cache', url=db_url, type='ext:database') 58 | o = object() 59 | cache.set_value("test", o) 60 | assert "test" in cache 61 | assert "test" in cache 62 | assert "foo" not in cache 63 | assert "foo" not in cache 64 | cache.remove_value("test") 65 | assert "test" not in cache 66 | 67 | def test_has_key_multicache(): 68 | cache = Cache('test', data_dir='./cache', url=db_url, type='ext:database') 69 | o = object() 70 | cache.set_value("test", o) 71 | assert "test" in cache 72 | assert "test" in cache 73 | cache = Cache('test', data_dir='./cache', url=db_url, type='ext:database') 74 | assert "test" in cache 75 | cache.remove_value('test') 76 | 77 | def test_clear(): 78 | cache = Cache('test', data_dir='./cache', url=db_url, type='ext:database') 79 | o = object() 80 | cache.set_value("test", o) 81 | assert "test" in cache 82 | cache.clear() 83 | assert "test" not in cache 84 | 85 | def test_unicode_keys(): 86 | cache = Cache('test', data_dir='./cache', url=db_url, type='ext:database') 87 | o = object() 88 | cache.set_value(u_('hiŏ'), o) 89 | assert u_('hiŏ') in cache 90 | assert u_('hŏa') not in cache 91 | cache.remove_value(u_('hiŏ')) 92 | assert u_('hiŏ') not in cache 93 | 94 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 95 | def test_increment(): 96 | app = 
WebTestApp(CacheMiddleware(simple_app)) 97 | res = app.get('/', extra_environ={'beaker.clear':True}) 98 | assert 'current value is: 1' in res 99 | res = app.get('/') 100 | assert 'current value is: 2' in res 101 | res = app.get('/') 102 | assert 'current value is: 3' in res 103 | 104 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 105 | def test_cache_manager(): 106 | app = WebTestApp(CacheMiddleware(cache_manager_app)) 107 | res = app.get('/') 108 | assert 'test_key is: test value' in res 109 | assert 'test_key cleared' in res 110 | -------------------------------------------------------------------------------- /tests/test_domain_setting.py: -------------------------------------------------------------------------------- 1 | from beaker.middleware import SessionMiddleware 2 | from unittest import SkipTest 3 | try: 4 | from webtest import TestApp as WebTestApp 5 | except ImportError: 6 | raise SkipTest("webtest not installed") 7 | 8 | def teardown_module(): 9 | import shutil 10 | shutil.rmtree('./cache', True) 11 | 12 | def simple_app(environ, start_response): 13 | session = environ['beaker.session'] 14 | domain = environ.get('domain') 15 | if domain: 16 | session.domain = domain 17 | if 'value' not in session: 18 | session['value'] = 0 19 | session['value'] += 1 20 | if not environ['PATH_INFO'].startswith('/nosave'): 21 | session.save() 22 | start_response('200 OK', [('Content-type', 'text/plain')]) 23 | msg = 'The current value is: %s, session id is %s' % (session.get('value', 0), 24 | session.id) 25 | return [msg.encode('utf-8')] 26 | 27 | 28 | def test_same_domain(): 29 | options = {'session.data_dir':'./cache', 30 | 'session.secret':'blah', 31 | 'session.cookie_domain': '.hoop.com'} 32 | app = WebTestApp(SessionMiddleware(simple_app, **options)) 33 | res = app.get('/', extra_environ=dict(HTTP_HOST='subdomain.hoop.com')) 34 | assert 'current value is: 1' in res 35 | assert 'Domain=.hoop.com' in res.headers['Set-Cookie'] 36 | res = app.get('/', extra_environ=dict(HTTP_HOST='another.hoop.com')) 37 | assert 'current value is: 2' in res 38 | assert [] == res.headers.getall('Set-Cookie') 39 | res = app.get('/', extra_environ=dict(HTTP_HOST='more.subdomain.hoop.com')) 40 | assert 'current value is: 3' in res 41 | 42 | 43 | def test_different_domain(): 44 | options = {'session.data_dir':'./cache', 45 | 'session.secret':'blah'} 46 | app = WebTestApp(SessionMiddleware(simple_app, **options)) 47 | res = app.get('/', extra_environ=dict(domain='.hoop.com', 48 | HTTP_HOST='www.hoop.com')) 49 | res = app.get('/', extra_environ=dict(domain='.hoop.co.uk', 50 | HTTP_HOST='www.hoop.com')) 51 | assert 'Domain=.hoop.co.uk' in res.headers['Set-Cookie'] 52 | assert 'current value is: 2' in res 53 | 54 | res = app.get('/', extra_environ=dict(domain='.hoop.co.uk', 55 | HTTP_HOST='www.test.com')) 56 | assert 'current value is: 1' in res 57 | 58 | 59 | if __name__ == '__main__': 60 | from paste import httpserver 61 | wsgi_app = SessionMiddleware(simple_app, {}) 62 | httpserver.serve(wsgi_app, host='127.0.0.1', port=8080) 63 | -------------------------------------------------------------------------------- /tests/test_increment.py: -------------------------------------------------------------------------------- 1 | import re 2 | import unittest 3 | 4 | from beaker.middleware import SessionMiddleware 5 | try: 6 | from webtest import TestApp as WebTestApp 7 | except ImportError: 8 | raise unittest.SkipTest("webtest not installed") 9 | 10 | 11 | def teardown_module(): 12 | import shutil 13 | 
shutil.rmtree('./cache', True) 14 | 15 | def no_save_app(environ, start_response): 16 | session = environ['beaker.session'] 17 | sess_id = environ.get('SESSION_ID') 18 | start_response('200 OK', [('Content-type', 'text/plain')]) 19 | msg = 'The current value is: %s, session id is %s' % (session.get('value'), 20 | session.id) 21 | return [msg.encode('utf-8')] 22 | 23 | def simple_app(environ, start_response): 24 | session = environ['beaker.session'] 25 | sess_id = environ.get('SESSION_ID') 26 | if sess_id: 27 | session = session.get_by_id(sess_id) 28 | if not session: 29 | start_response('200 OK', [('Content-type', 'text/plain')]) 30 | return [("No session id of %s found." % sess_id).encode('utf-8')] 31 | if not 'value' in session: 32 | session['value'] = 0 33 | session['value'] += 1 34 | if not environ['PATH_INFO'].startswith('/nosave'): 35 | session.save() 36 | start_response('200 OK', [('Content-type', 'text/plain')]) 37 | msg = 'The current value is: %s, session id is %s' % (session.get('value'), 38 | session.id) 39 | return [msg.encode('utf-8')] 40 | 41 | def simple_auto_app(environ, start_response): 42 | """Like the simple_app, but assume that sessions auto-save""" 43 | session = environ['beaker.session'] 44 | sess_id = environ.get('SESSION_ID') 45 | if sess_id: 46 | session = session.get_by_id(sess_id) 47 | if not session: 48 | start_response('200 OK', [('Content-type', 'text/plain')]) 49 | return [("No session id of %s found." % sess_id).encode('utf-8')] 50 | if not 'value' in session: 51 | session['value'] = 0 52 | session['value'] += 1 53 | if environ['PATH_INFO'].startswith('/nosave'): 54 | session.revert() 55 | start_response('200 OK', [('Content-type', 'text/plain')]) 56 | msg = 'The current value is: %s, session id is %s' % (session.get('value', 0), 57 | session.id) 58 | return [msg.encode('utf-8')] 59 | 60 | def test_no_save(): 61 | options = {'session.data_dir':'./cache', 'session.secret':'blah'} 62 | app = WebTestApp(SessionMiddleware(no_save_app, **options)) 63 | res = app.get('/') 64 | assert 'current value is: None' in res 65 | assert [] == res.headers.getall('Set-Cookie') 66 | 67 | 68 | def test_increment(): 69 | options = {'session.data_dir':'./cache', 'session.secret':'blah'} 70 | app = WebTestApp(SessionMiddleware(simple_app, **options)) 71 | res = app.get('/') 72 | assert 'current value is: 1' in res 73 | res = app.get('/') 74 | assert 'current value is: 2' in res 75 | res = app.get('/') 76 | assert 'current value is: 3' in res 77 | 78 | def test_increment_auto(): 79 | options = {'session.data_dir':'./cache', 'session.secret':'blah'} 80 | app = WebTestApp(SessionMiddleware(simple_auto_app, auto=True, **options)) 81 | res = app.get('/') 82 | assert 'current value is: 1' in res 83 | res = app.get('/') 84 | assert 'current value is: 2' in res 85 | res = app.get('/') 86 | assert 'current value is: 3' in res 87 | 88 | 89 | def test_different_sessions(): 90 | options = {'session.data_dir':'./cache', 'session.secret':'blah'} 91 | app = WebTestApp(SessionMiddleware(simple_app, **options)) 92 | app2 = WebTestApp(SessionMiddleware(simple_app, **options)) 93 | res = app.get('/') 94 | assert 'current value is: 1' in res 95 | res = app2.get('/') 96 | assert 'current value is: 1' in res 97 | res = app2.get('/') 98 | res = app2.get('/') 99 | res = app2.get('/') 100 | res2 = app.get('/') 101 | assert 'current value is: 2' in res2 102 | assert 'current value is: 4' in res 103 | 104 | def test_different_sessions_auto(): 105 | options = {'session.data_dir':'./cache', 
'session.secret':'blah'} 106 | app = WebTestApp(SessionMiddleware(simple_auto_app, auto=True, **options)) 107 | app2 = WebTestApp(SessionMiddleware(simple_auto_app, auto=True, **options)) 108 | res = app.get('/') 109 | assert 'current value is: 1' in res 110 | res = app2.get('/') 111 | assert 'current value is: 1' in res 112 | res = app2.get('/') 113 | res = app2.get('/') 114 | res = app2.get('/') 115 | res2 = app.get('/') 116 | assert 'current value is: 2' in res2 117 | assert 'current value is: 4' in res 118 | 119 | def test_nosave(): 120 | options = {'session.data_dir':'./cache', 'session.secret':'blah'} 121 | app = WebTestApp(SessionMiddleware(simple_app, **options)) 122 | res = app.get('/nosave') 123 | assert 'current value is: 1' in res 124 | res = app.get('/nosave') 125 | assert 'current value is: 1' in res 126 | 127 | res = app.get('/') 128 | assert 'current value is: 1' in res 129 | res = app.get('/') 130 | assert 'current value is: 2' in res 131 | 132 | def test_revert(): 133 | options = {'session.data_dir':'./cache', 'session.secret':'blah'} 134 | app = WebTestApp(SessionMiddleware(simple_auto_app, auto=True, **options)) 135 | res = app.get('/nosave') 136 | assert 'current value is: 0' in res 137 | res = app.get('/nosave') 138 | assert 'current value is: 0' in res 139 | 140 | res = app.get('/') 141 | assert 'current value is: 1' in res 142 | assert [] == res.headers.getall('Set-Cookie') 143 | res = app.get('/') 144 | assert [] == res.headers.getall('Set-Cookie') 145 | assert 'current value is: 2' in res 146 | 147 | # Finally, ensure that reverting shows the proper one 148 | res = app.get('/nosave') 149 | assert [] == res.headers.getall('Set-Cookie') 150 | assert 'current value is: 2' in res 151 | 152 | def test_load_session_by_id(): 153 | options = {'session.data_dir':'./cache', 'session.secret':'blah'} 154 | app = WebTestApp(SessionMiddleware(simple_app, **options)) 155 | res = app.get('/') 156 | assert 'current value is: 1' in res 157 | res = app.get('/') 158 | res = app.get('/') 159 | assert 'current value is: 3' in res 160 | old_id = re.sub(r'^.*?session id is (\S+)$', r'\1', res.body.decode('utf-8'), re.M) 161 | 162 | # Clear the cookies and do a new request 163 | app = WebTestApp(SessionMiddleware(simple_app, **options)) 164 | res = app.get('/') 165 | assert 'current value is: 1' in res 166 | 167 | # Load a bogus session to see that its not there 168 | res = app.get('/', extra_environ={'SESSION_ID': 'jil2j34il2j34ilj23'}) 169 | assert 'No session id of' in res 170 | 171 | # Saved session was at 3, now it'll be 4 172 | res = app.get('/', extra_environ={'SESSION_ID': str(old_id)}) 173 | assert 'current value is: 4' in res 174 | 175 | # Prior request is now up to 2 176 | res = app.get('/') 177 | assert 'current value is: 2' in res 178 | 179 | 180 | if __name__ == '__main__': 181 | from paste import httpserver 182 | wsgi_app = SessionMiddleware(simple_app, {}) 183 | httpserver.serve(wsgi_app, host='127.0.0.1', port=8080) 184 | -------------------------------------------------------------------------------- /tests/test_managers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bbangert/beaker/913d195875899b31fdbf8b4fda64094870e2d3d6/tests/test_managers/__init__.py -------------------------------------------------------------------------------- /tests/test_managers/base.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | import threading 3 | 
import unittest 4 | 5 | import time 6 | 7 | import datetime 8 | 9 | from beaker._compat import u_ 10 | from beaker.cache import Cache 11 | from beaker.middleware import SessionMiddleware, CacheMiddleware 12 | from webtest import TestApp as WebTestApp 13 | 14 | 15 | class CacheManagerBaseTests(unittest.TestCase): 16 | SUPPORTS_EXPIRATION = True 17 | SUPPORTS_TIMEOUT = True 18 | CACHE_ARGS = {} 19 | 20 | @classmethod 21 | def setUpClass(cls): 22 | def simple_session_app(environ, start_response): 23 | session = environ['beaker.session'] 24 | sess_id = environ.get('SESSION_ID') 25 | if environ['PATH_INFO'].startswith('/invalid'): 26 | # Attempt to access the session 27 | id = session.id 28 | session['value'] = 2 29 | else: 30 | if sess_id: 31 | session = session.get_by_id(sess_id) 32 | if not session: 33 | start_response('200 OK', [('Content-type', 'text/plain')]) 34 | return [("No session id of %s found." % sess_id).encode('utf-8')] 35 | if not session.has_key('value'): 36 | session['value'] = 0 37 | session['value'] += 1 38 | if not environ['PATH_INFO'].startswith('/nosave'): 39 | session.save() 40 | start_response('200 OK', [('Content-type', 'text/plain')]) 41 | return [('The current value is: %d, session id is %s' % (session['value'], 42 | session.id)).encode('utf-8')] 43 | 44 | def simple_app(environ, start_response): 45 | extra_args = cls.CACHE_ARGS 46 | clear = False 47 | if environ.get('beaker.clear'): 48 | clear = True 49 | cache = environ['beaker.cache'].get_cache('testcache', **extra_args) 50 | if clear: 51 | cache.clear() 52 | try: 53 | value = cache.get_value('value') 54 | except: 55 | value = 0 56 | cache.set_value('value', value + 1) 57 | start_response('200 OK', [('Content-type', 'text/plain')]) 58 | return [('The current value is: %s' % cache.get_value('value')).encode('utf-8')] 59 | 60 | def using_none_app(environ, start_response): 61 | extra_args = cls.CACHE_ARGS 62 | clear = False 63 | if environ.get('beaker.clear'): 64 | clear = True 65 | cache = environ['beaker.cache'].get_cache('testcache', **extra_args) 66 | if clear: 67 | cache.clear() 68 | try: 69 | value = cache.get_value('value') 70 | except: 71 | value = 10 72 | cache.set_value('value', None) 73 | start_response('200 OK', [('Content-type', 'text/plain')]) 74 | return [('The current value is: %s' % value).encode('utf-8')] 75 | 76 | def cache_manager_app(environ, start_response): 77 | cm = environ['beaker.cache'] 78 | cm.get_cache('test')['test_key'] = 'test value' 79 | 80 | start_response('200 OK', [('Content-type', 'text/plain')]) 81 | yield ("test_key is: %s\n" % cm.get_cache('test')['test_key']).encode('utf-8') 82 | cm.get_cache('test').clear() 83 | 84 | try: 85 | test_value = cm.get_cache('test')['test_key'] 86 | except KeyError: 87 | yield "test_key cleared".encode('utf-8') 88 | else: 89 | yield ( 90 | "test_key wasn't cleared, is: %s\n" % cm.get_cache('test')['test_key'] 91 | ).encode('utf-8') 92 | 93 | cls.simple_session_app = staticmethod(simple_session_app) 94 | cls.simple_app = staticmethod(simple_app) 95 | cls.using_none_app = staticmethod(using_none_app) 96 | cls.cache_manager_app = staticmethod(cache_manager_app) 97 | 98 | def setUp(self): 99 | Cache('test', **self.CACHE_ARGS).clear() 100 | 101 | def test_session(self): 102 | app = WebTestApp(SessionMiddleware(self.simple_session_app, **self.CACHE_ARGS)) 103 | res = app.get('/') 104 | assert 'current value is: 1' in res 105 | res = app.get('/') 106 | assert 'current value is: 2' in res 107 | res = app.get('/') 108 | assert 'current value is: 3' in res 
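    # test_session_invalid below presents a well-formed but unknown session id
    # in the Cookie header; the middleware is expected to hand the application
    # a usable, fresh session rather than raise, and the assertion matches the
    # explicit session['value'] = 2 set by the /invalid handler above.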
109 | 110 | def test_session_invalid(self): 111 | app = WebTestApp(SessionMiddleware(self.simple_session_app, **self.CACHE_ARGS)) 112 | res = app.get('/invalid', headers=dict( 113 | Cookie='beaker.session.id=df7324911e246b70b5781c3c58328442; Path=/')) 114 | assert 'current value is: 2' in res 115 | 116 | def test_session_timeout(self): 117 | app = WebTestApp(SessionMiddleware(self.simple_session_app, timeout=1, **self.CACHE_ARGS)) 118 | 119 | session = app.app._get_session() 120 | session.save() 121 | if self.SUPPORTS_TIMEOUT: 122 | assert session.namespace.timeout == 121 123 | 124 | res = app.get('/') 125 | assert 'current value is: 1' in res 126 | res = app.get('/') 127 | assert 'current value is: 2' in res 128 | res = app.get('/') 129 | assert 'current value is: 3' in res 130 | 131 | def test_has_key(self): 132 | cache = Cache('test', **self.CACHE_ARGS) 133 | o = object() 134 | cache.set_value("test", o) 135 | assert cache.has_key("test") 136 | assert "test" in cache 137 | assert not cache.has_key("foo") 138 | assert "foo" not in cache 139 | cache.remove_value("test") 140 | assert not cache.has_key("test") 141 | 142 | def test_clear(self): 143 | cache = Cache('test', **self.CACHE_ARGS) 144 | cache.set_value('test', 20) 145 | cache.set_value('fred', 10) 146 | assert cache.has_key('test') 147 | assert 'test' in cache 148 | assert cache.has_key('fred') 149 | cache.clear() 150 | assert not cache.has_key("test") 151 | 152 | def test_has_key_multicache(self): 153 | cache = Cache('test', **self.CACHE_ARGS) 154 | o = object() 155 | cache.set_value("test", o) 156 | assert cache.has_key("test") 157 | assert "test" in cache 158 | cache = Cache('test', **self.CACHE_ARGS) 159 | assert cache.has_key("test") 160 | 161 | def test_unicode_keys(self): 162 | cache = Cache('test', **self.CACHE_ARGS) 163 | o = object() 164 | cache.set_value(u_('hiŏ'), o) 165 | assert u_('hiŏ') in cache 166 | assert u_('hŏa') not in cache 167 | cache.remove_value(u_('hiŏ')) 168 | assert u_('hiŏ') not in cache 169 | 170 | def test_long_unicode_keys(self): 171 | cache = Cache('test', **self.CACHE_ARGS) 172 | o = object() 173 | long_str = u_( 174 | 'Очень длинная строка, которая не влезает в сто двадцать восемь байт и поэтому не проходит ограничение в check_key, что очень прискорбно, не правда ли, друзья? Давайте же скорее исправим это досадное недоразумение!' 
175 | ) 176 | cache.set_value(long_str, o) 177 | assert long_str in cache 178 | cache.remove_value(long_str) 179 | assert long_str not in cache 180 | 181 | def test_spaces_in_unicode_keys(self): 182 | cache = Cache('test', **self.CACHE_ARGS) 183 | o = object() 184 | cache.set_value(u_('hi ŏ'), o) 185 | assert u_('hi ŏ') in cache 186 | assert u_('hŏa') not in cache 187 | cache.remove_value(u_('hi ŏ')) 188 | assert u_('hi ŏ') not in cache 189 | 190 | def test_spaces_in_keys(self): 191 | cache = Cache('test', **self.CACHE_ARGS) 192 | cache.set_value("has space", 24) 193 | assert cache.has_key("has space") 194 | assert 24 == cache.get_value("has space") 195 | cache.set_value("hasspace", 42) 196 | assert cache.has_key("hasspace") 197 | assert 42 == cache.get_value("hasspace") 198 | 199 | def test_increment(self): 200 | app = WebTestApp(CacheMiddleware(self.simple_app)) 201 | res = app.get('/', extra_environ={'beaker.clear': True}) 202 | assert 'current value is: 1' in res 203 | res = app.get('/') 204 | assert 'current value is: 2' in res 205 | res = app.get('/') 206 | assert 'current value is: 3' in res 207 | 208 | app = WebTestApp(CacheMiddleware(self.simple_app)) 209 | res = app.get('/', extra_environ={'beaker.clear': True}) 210 | assert 'current value is: 1' in res 211 | res = app.get('/') 212 | assert 'current value is: 2' in res 213 | res = app.get('/') 214 | assert 'current value is: 3' in res 215 | 216 | def test_cache_manager(self): 217 | app = WebTestApp(CacheMiddleware(self.cache_manager_app)) 218 | res = app.get('/') 219 | assert 'test_key is: test value' in res 220 | assert 'test_key cleared' in res 221 | 222 | def test_store_none(self): 223 | app = WebTestApp(CacheMiddleware(self.using_none_app)) 224 | res = app.get('/', extra_environ={'beaker.clear': True}) 225 | assert 'current value is: 10' in res 226 | res = app.get('/') 227 | assert 'current value is: None' in res 228 | 229 | def test_expiretime(self): 230 | cache = Cache('test', **self.CACHE_ARGS) 231 | cache.set_value("has space", 24, expiretime=1) 232 | assert cache.has_key("has space") 233 | time.sleep(1.1) 234 | assert not cache.has_key("has space") 235 | 236 | def test_expiretime_automatic(self): 237 | if not self.SUPPORTS_EXPIRATION: 238 | self.skipTest('NamespaceManager does not support automatic expiration') 239 | 240 | cache = Cache('test', **self.CACHE_ARGS) 241 | cache.set_value("has space", 24, expiretime=1) 242 | assert cache.namespace.has_key("has space") 243 | time.sleep(1.1) 244 | assert not cache.namespace.has_key("has space") 245 | 246 | def test_createfunc(self): 247 | cache = Cache('test', **self.CACHE_ARGS) 248 | 249 | def createfunc(): 250 | createfunc.count += 1 251 | return createfunc.count 252 | createfunc.count = 0 253 | 254 | def keepitlocked(): 255 | lock = cache.namespace.get_creation_lock('test') 256 | lock.acquire() 257 | keepitlocked.acquired = True 258 | time.sleep(1.0) 259 | lock.release() 260 | keepitlocked.acquired = False 261 | 262 | v0 = cache.get_value('test', createfunc=createfunc) 263 | self.assertEqual(v0, 1) 264 | 265 | v0 = cache.get_value('test', createfunc=createfunc) 266 | self.assertEqual(v0, 1) 267 | 268 | cache.remove_value('test') 269 | 270 | begin = datetime.datetime.utcnow() 271 | t = threading.Thread(target=keepitlocked) 272 | t.start() 273 | while not keepitlocked.acquired: 274 | # Wait for the thread that should lock the cache to start. 
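# (Added clarification: keepitlocked() holds the namespace creation lock for
#  the 'test' key for roughly one second, so the get_value() call below cannot
#  run createfunc until that lock is released; the elapsed-time assertion at
#  the end of this test verifies exactly that blocking behaviour.)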
275 | time.sleep(0.001) 276 | 277 | v0 = cache.get_value('test', createfunc=createfunc) 278 | self.assertEqual(v0, 2) 279 | 280 | # Ensure that the `get_value` was blocked by the concurrent thread. 281 | assert datetime.datetime.utcnow() - begin > datetime.timedelta(seconds=1) 282 | 283 | t.join() 284 | -------------------------------------------------------------------------------- /tests/test_managers/test_ext_mongodb.py: -------------------------------------------------------------------------------- 1 | from beaker.cache import Cache 2 | from . import base 3 | 4 | 5 | class TestMongoDB(base.CacheManagerBaseTests): 6 | SUPPORTS_TIMEOUT = False 7 | CACHE_ARGS = { 8 | 'type': 'ext:mongodb', 9 | 'url': 'mongodb://localhost:27017/beaker_testdb' 10 | } 11 | 12 | def test_client_reuse(self): 13 | cache1 = Cache('test1', **self.CACHE_ARGS) 14 | cli1 = cache1.namespace.client 15 | cache2 = Cache('test2', **self.CACHE_ARGS) 16 | cli2 = cache2.namespace.client 17 | self.assertTrue(cli1 is cli2) -------------------------------------------------------------------------------- /tests/test_managers/test_ext_redis.py: -------------------------------------------------------------------------------- 1 | from beaker.cache import Cache 2 | from . import base 3 | 4 | 5 | class TestRedis(base.CacheManagerBaseTests): 6 | CACHE_ARGS = { 7 | 'type': 'ext:redis', 8 | 'url': 'redis://localhost:6379/13' 9 | } 10 | 11 | def test_client_reuse(self): 12 | cache1 = Cache('test1', **self.CACHE_ARGS) 13 | cli1 = cache1.namespace.client 14 | cache2 = Cache('test2', **self.CACHE_ARGS) 15 | cli2 = cache2.namespace.client 16 | self.assertTrue(cli1 is cli2) -------------------------------------------------------------------------------- /tests/test_managers/test_ext_rediscluster.py: -------------------------------------------------------------------------------- 1 | from beaker.cache import Cache 2 | from . 
import base 3 | 4 | 5 | class TestRedis(base.CacheManagerBaseTests): 6 | CACHE_ARGS = { 7 | 'type': 'ext:rediscluster', 8 | 'urls': 'redis://localhost:5000' 9 | } 10 | 11 | def test_client_reuse(self): 12 | cache1 = Cache('test1', **self.CACHE_ARGS) 13 | cli1 = cache1.namespace.client 14 | cache2 = Cache('test2', **self.CACHE_ARGS) 15 | cli2 = cache2.namespace.client 16 | self.assertTrue(cli1 is cli2) 17 | -------------------------------------------------------------------------------- /tests/test_memcached.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from beaker._compat import u_ 3 | 4 | import unittest.mock 5 | 6 | from beaker.cache import Cache, CacheManager, util 7 | from beaker.middleware import CacheMiddleware, SessionMiddleware 8 | from beaker.exceptions import InvalidCacheBackendError 9 | from beaker.util import parse_cache_config_options 10 | import unittest 11 | 12 | try: 13 | from webtest import TestApp as WebTestApp 14 | except ImportError: 15 | WebTestApp = None 16 | 17 | try: 18 | from beaker.ext import memcached 19 | client = memcached._load_client() 20 | except InvalidCacheBackendError: 21 | raise unittest.SkipTest("an appropriate memcached backend is not installed") 22 | 23 | mc_url = '127.0.0.1:11211' 24 | 25 | c =client.Client([mc_url]) 26 | c.set('x', 'y') 27 | if not c.get('x'): 28 | raise unittest.SkipTest("Memcached is not running at %s" % mc_url) 29 | 30 | def teardown_module(): 31 | import shutil 32 | shutil.rmtree('./cache', True) 33 | 34 | def simple_session_app(environ, start_response): 35 | session = environ['beaker.session'] 36 | sess_id = environ.get('SESSION_ID') 37 | if environ['PATH_INFO'].startswith('/invalid'): 38 | # Attempt to access the session 39 | id = session.id 40 | session['value'] = 2 41 | else: 42 | if sess_id: 43 | session = session.get_by_id(sess_id) 44 | if not session: 45 | start_response('200 OK', [('Content-type', 'text/plain')]) 46 | return ["No session id of %s found." 
% sess_id] 47 | if not session.has_key('value'): 48 | session['value'] = 0 49 | session['value'] += 1 50 | if not environ['PATH_INFO'].startswith('/nosave'): 51 | session.save() 52 | start_response('200 OK', [('Content-type', 'text/plain')]) 53 | return [ 54 | ('The current value is: %d, session id is %s' % ( 55 | session['value'], session.id 56 | )).encode('utf-8') 57 | ] 58 | 59 | def simple_app(environ, start_response): 60 | extra_args = {} 61 | clear = False 62 | if environ.get('beaker.clear'): 63 | clear = True 64 | extra_args['type'] = 'ext:memcached' 65 | extra_args['url'] = mc_url 66 | extra_args['data_dir'] = './cache' 67 | cache = environ['beaker.cache'].get_cache('testcache', **extra_args) 68 | if clear: 69 | cache.clear() 70 | try: 71 | value = cache.get_value('value') 72 | except: 73 | value = 0 74 | cache.set_value('value', value+1) 75 | start_response('200 OK', [('Content-type', 'text/plain')]) 76 | return [ 77 | ('The current value is: %s' % cache.get_value('value')).encode('utf-8') 78 | ] 79 | 80 | 81 | def using_none_app(environ, start_response): 82 | extra_args = {} 83 | clear = False 84 | if environ.get('beaker.clear'): 85 | clear = True 86 | extra_args['type'] = 'ext:memcached' 87 | extra_args['url'] = mc_url 88 | extra_args['data_dir'] = './cache' 89 | cache = environ['beaker.cache'].get_cache('testcache', **extra_args) 90 | if clear: 91 | cache.clear() 92 | try: 93 | value = cache.get_value('value') 94 | except: 95 | value = 10 96 | cache.set_value('value', None) 97 | start_response('200 OK', [('Content-type', 'text/plain')]) 98 | return [ 99 | ('The current value is: %s' % value).encode('utf-8') 100 | ] 101 | 102 | 103 | def cache_manager_app(environ, start_response): 104 | cm = environ['beaker.cache'] 105 | cm.get_cache('test')['test_key'] = 'test value' 106 | 107 | start_response('200 OK', [('Content-type', 'text/plain')]) 108 | yield ( 109 | "test_key is: %s\n" % cm.get_cache('test')['test_key'] 110 | ).encode('utf-8') 111 | cm.get_cache('test').clear() 112 | 113 | try: 114 | test_value = cm.get_cache('test')['test_key'] 115 | except KeyError: 116 | yield "test_key cleared".encode('utf-8') 117 | else: 118 | yield ("test_key wasn't cleared, is: %s\n" % ( 119 | cm.get_cache('test')['test_key'], 120 | )).encode('utf-8') 121 | 122 | 123 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 124 | def test_session(): 125 | app = WebTestApp(SessionMiddleware(simple_session_app, data_dir='./cache', type='ext:memcached', url=mc_url)) 126 | res = app.get('/') 127 | assert 'current value is: 1' in res 128 | res = app.get('/') 129 | assert 'current value is: 2' in res 130 | res = app.get('/') 131 | assert 'current value is: 3' in res 132 | 133 | 134 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 135 | def test_session_invalid(): 136 | app = WebTestApp(SessionMiddleware(simple_session_app, data_dir='./cache', type='ext:memcached', url=mc_url)) 137 | res = app.get('/invalid', headers=dict(Cookie='beaker.session.id=df7324911e246b70b5781c3c58328442; Path=/')) 138 | assert 'current value is: 2' in res 139 | 140 | 141 | def test_has_key(): 142 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 143 | o = object() 144 | cache.set_value("test", o) 145 | assert cache.has_key("test") 146 | assert "test" in cache 147 | assert not cache.has_key("foo") 148 | assert "foo" not in cache 149 | cache.remove_value("test") 150 | assert not cache.has_key("test") 151 | 152 | def test_dropping_keys(): 153 | cache = Cache('test', 
data_dir='./cache', url=mc_url, type='ext:memcached') 154 | cache.set_value('test', 20) 155 | cache.set_value('fred', 10) 156 | assert cache.has_key('test') 157 | assert 'test' in cache 158 | assert cache.has_key('fred') 159 | 160 | # Directly nuke the actual key, to simulate it being removed by memcached 161 | cache.namespace.mc.delete('test_test') 162 | assert not cache.has_key('test') 163 | assert cache.has_key('fred') 164 | 165 | # Nuke the keys dict, it might die, who knows 166 | cache.namespace.mc.delete('test:keys') 167 | assert cache.has_key('fred') 168 | 169 | # And we still need clear to work, even if it won't work well 170 | cache.clear() 171 | 172 | def test_deleting_keys(): 173 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 174 | cache.set_value('test', 20) 175 | 176 | # Nuke the keys dict, it might die, who knows 177 | cache.namespace.mc.delete('test:keys') 178 | 179 | assert cache.has_key('test') 180 | 181 | # make sure we can still delete keys even though our keys dict got nuked 182 | del cache['test'] 183 | 184 | assert not cache.has_key('test') 185 | 186 | def test_has_key_multicache(): 187 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 188 | o = object() 189 | cache.set_value("test", o) 190 | assert cache.has_key("test") 191 | assert "test" in cache 192 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 193 | assert cache.has_key("test") 194 | 195 | def test_unicode_keys(): 196 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 197 | o = object() 198 | cache.set_value(u_('hiŏ'), o) 199 | assert u_('hiŏ') in cache 200 | assert u_('hŏa') not in cache 201 | cache.remove_value(u_('hiŏ')) 202 | assert u_('hiŏ') not in cache 203 | 204 | def test_long_unicode_keys(): 205 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 206 | o = object() 207 | long_str = u_('Очень длинная строка, которая не влезает в сто двадцать восемь байт и поэтому не проходит ограничение в check_key, что очень прискорбно, не правда ли, друзья? 
Давайте же скорее исправим это досадное недоразумение!') 208 | cache.set_value(long_str, o) 209 | assert long_str in cache 210 | cache.remove_value(long_str) 211 | assert long_str not in cache 212 | 213 | def test_spaces_in_unicode_keys(): 214 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 215 | o = object() 216 | cache.set_value(u_('hi ŏ'), o) 217 | assert u_('hi ŏ') in cache 218 | assert u_('hŏa') not in cache 219 | cache.remove_value(u_('hi ŏ')) 220 | assert u_('hi ŏ') not in cache 221 | 222 | def test_spaces_in_keys(): 223 | cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') 224 | cache.set_value("has space", 24) 225 | assert cache.has_key("has space") 226 | assert 24 == cache.get_value("has space") 227 | cache.set_value("hasspace", 42) 228 | assert cache.has_key("hasspace") 229 | assert 42 == cache.get_value("hasspace") 230 | 231 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 232 | def test_increment(): 233 | app = WebTestApp(CacheMiddleware(simple_app)) 234 | res = app.get('/', extra_environ={'beaker.clear':True}) 235 | assert 'current value is: 1' in res.text 236 | res = app.get('/') 237 | assert 'current value is: 2' in res.text 238 | res = app.get('/') 239 | assert 'current value is: 3' in res.text 240 | 241 | app = WebTestApp(CacheMiddleware(simple_app)) 242 | res = app.get('/', extra_environ={'beaker.clear':True}) 243 | assert 'current value is: 1' in res 244 | res = app.get('/') 245 | assert 'current value is: 2' in res 246 | res = app.get('/') 247 | assert 'current value is: 3' in res 248 | 249 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 250 | def test_cache_manager(): 251 | app = WebTestApp(CacheMiddleware(cache_manager_app)) 252 | res = app.get('/') 253 | assert 'test_key is: test value' in res.text 254 | assert 'test_key cleared' in res.text 255 | 256 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 257 | def test_store_none(): 258 | app = WebTestApp(CacheMiddleware(using_none_app)) 259 | res = app.get('/', extra_environ={'beaker.clear':True}) 260 | assert 'current value is: 10' in res.text 261 | res = app.get('/') 262 | assert 'current value is: None' in res.text 263 | -------------------------------------------------------------------------------- /tests/test_namespacing.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | 5 | def teardown_module(): 6 | import shutil 7 | shutil.rmtree('./cache', True) 8 | 9 | 10 | def test_consistent_namespacing(): 11 | sys.path.append(os.path.dirname(__file__)) 12 | from tests.test_namespacing_files.namespace_go import go 13 | go() 14 | -------------------------------------------------------------------------------- /tests/test_namespacing_files/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bbangert/beaker/913d195875899b31fdbf8b4fda64094870e2d3d6/tests/test_namespacing_files/__init__.py -------------------------------------------------------------------------------- /tests/test_namespacing_files/namespace_get.py: -------------------------------------------------------------------------------- 1 | from beaker.cache import CacheManager 2 | from beaker.util import parse_cache_config_options 3 | from datetime import datetime 4 | 5 | defaults = {'cache.data_dir':'./cache', 'cache.type':'dbm', 'cache.expire': 60, 'cache.regions': 'short_term'} 6 | 7 | cache = 
CacheManager(**parse_cache_config_options(defaults)) 8 | 9 | def get_cached_value(): 10 | @cache.region('short_term', 'test_namespacing') 11 | def get_value(): 12 | return datetime.now() 13 | 14 | return get_value() 15 | 16 | -------------------------------------------------------------------------------- /tests/test_namespacing_files/namespace_go.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import time 3 | 4 | 5 | def go(): 6 | from . import namespace_get 7 | a = namespace_get.get_cached_value() 8 | time.sleep(0.3) 9 | b = namespace_get.get_cached_value() 10 | 11 | time.sleep(0.3) 12 | 13 | from ..test_namespacing_files import namespace_get as upper_ns_get 14 | c = upper_ns_get.get_cached_value() 15 | time.sleep(0.3) 16 | d = upper_ns_get.get_cached_value() 17 | 18 | print(a) 19 | print(b) 20 | print(c) 21 | print(d) 22 | 23 | assert a == b, 'Basic caching problem - should never happen' 24 | assert c == d, 'Basic caching problem - should never happen' 25 | assert a == c, 'Namespaces not consistent when using different import paths' 26 | -------------------------------------------------------------------------------- /tests/test_pbkdf2.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | 3 | from binascii import b2a_hex, a2b_hex 4 | from beaker.crypto.pbkdf2 import pbkdf2 5 | 6 | 7 | def test_pbkdf2_test1(): 8 | result = pbkdf2("password", "ATHENA.MIT.EDUraeburn", 1, dklen=16) 9 | expected = a2b_hex(b"cdedb5281bb2f801565a1122b2563515") 10 | assert result == expected, (result, expected) 11 | 12 | 13 | def test_pbkdf2_test2(): 14 | result = b2a_hex(pbkdf2("password", "ATHENA.MIT.EDUraeburn", 1200, dklen=32)) 15 | expected = b"5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13" 16 | assert result == expected, (result, expected) 17 | 18 | 19 | def test_pbkdf2_test3(): 20 | result = b2a_hex(pbkdf2("X"*64, "pass phrase equals block size", 1200, dklen=32)) 21 | expected = b"139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1" 22 | assert result == expected, (result, expected) 23 | 24 | 25 | def test_pbkdf2_test4(): 26 | result = b2a_hex(pbkdf2("X"*65, "pass phrase exceeds block size", 1200, dklen=32)) 27 | expected = b"9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a" 28 | assert result == expected, (result, expected) 29 | 30 | 31 | def test_pbkd2_issue81(): 32 | """Test for Regression on Incorrect behavior of bytes_() under Python3.4 33 | 34 | https://github.com/bbangert/beaker/issues/81 35 | """ 36 | result = pbkdf2("MASTER_KEY", b"SALT", 1) 37 | expected = pbkdf2("MASTER_KEY", "SALT", 1) 38 | assert result == expected, (result, expected) 39 | -------------------------------------------------------------------------------- /tests/test_sqla.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from beaker._compat import u_ 3 | from beaker.cache import clsmap, Cache, util 4 | from beaker.exceptions import InvalidCacheBackendError 5 | from beaker.middleware import CacheMiddleware 6 | from unittest import SkipTest 7 | 8 | try: 9 | from webtest import TestApp as WebTestApp 10 | except ImportError: 11 | WebTestApp = None 12 | 13 | try: 14 | clsmap['ext:sqla']._init_dependencies() 15 | except InvalidCacheBackendError: 16 | raise SkipTest("an appropriate SQLAlchemy backend is not installed") 17 | 18 | import sqlalchemy as sa 19 | from 
beaker.ext.sqla import make_cache_table 20 | 21 | engine = sa.create_engine('sqlite://') 22 | metadata = sa.MetaData() 23 | cache_table = make_cache_table(metadata) 24 | metadata.create_all(engine) 25 | 26 | def simple_app(environ, start_response): 27 | extra_args = {} 28 | clear = False 29 | if environ.get('beaker.clear'): 30 | clear = True 31 | extra_args['type'] = 'ext:sqla' 32 | extra_args['bind'] = engine 33 | extra_args['table'] = cache_table 34 | extra_args['data_dir'] = './cache' 35 | cache = environ['beaker.cache'].get_cache('testcache', **extra_args) 36 | if clear: 37 | cache.clear() 38 | try: 39 | value = cache.get_value('value') 40 | except: 41 | value = 0 42 | cache.set_value('value', value+1) 43 | start_response('200 OK', [('Content-type', 'text/plain')]) 44 | return [('The current value is: %s' % cache.get_value('value')).encode('utf-8')] 45 | 46 | def cache_manager_app(environ, start_response): 47 | cm = environ['beaker.cache'] 48 | cm.get_cache('test')['test_key'] = 'test value' 49 | 50 | start_response('200 OK', [('Content-type', 'text/plain')]) 51 | yield ("test_key is: %s\n" % cm.get_cache('test')['test_key']).encode('utf-8') 52 | cm.get_cache('test').clear() 53 | 54 | try: 55 | test_value = cm.get_cache('test')['test_key'] 56 | except KeyError: 57 | yield ("test_key cleared").encode('utf-8') 58 | else: 59 | test_value = cm.get_cache('test')['test_key'] 60 | yield ("test_key wasn't cleared, is: %s\n" % test_value).encode('utf-8') 61 | 62 | def make_cache(): 63 | """Return a ``Cache`` for use by the unit tests.""" 64 | return Cache('test', data_dir='./cache', bind=engine, table=cache_table, 65 | type='ext:sqla') 66 | 67 | def test_has_key(): 68 | cache = make_cache() 69 | o = object() 70 | cache.set_value("test", o) 71 | assert cache.has_key("test") 72 | assert "test" in cache 73 | assert not cache.has_key("foo") 74 | assert "foo" not in cache 75 | cache.remove_value("test") 76 | assert not cache.has_key("test") 77 | 78 | def test_has_key_multicache(): 79 | cache = make_cache() 80 | o = object() 81 | cache.set_value("test", o) 82 | assert cache.has_key("test") 83 | assert "test" in cache 84 | cache = make_cache() 85 | assert cache.has_key("test") 86 | cache.remove_value('test') 87 | 88 | def test_clear(): 89 | cache = make_cache() 90 | o = object() 91 | cache.set_value("test", o) 92 | assert cache.has_key("test") 93 | cache.clear() 94 | assert not cache.has_key("test") 95 | 96 | def test_unicode_keys(): 97 | cache = make_cache() 98 | o = object() 99 | cache.set_value(u_('hiŏ'), o) 100 | assert u_('hiŏ') in cache 101 | assert u_('hŏa') not in cache 102 | cache.remove_value(u_('hiŏ')) 103 | assert u_('hiŏ') not in cache 104 | 105 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 106 | def test_increment(): 107 | app = WebTestApp(CacheMiddleware(simple_app)) 108 | res = app.get('/', extra_environ={'beaker.clear': True}) 109 | assert 'current value is: 1' in res 110 | res = app.get('/') 111 | assert 'current value is: 2' in res 112 | res = app.get('/') 113 | assert 'current value is: 3' in res 114 | 115 | @util.skip_if(lambda: WebTestApp is None, "webtest not installed") 116 | def test_cache_manager(): 117 | app = WebTestApp(CacheMiddleware(cache_manager_app)) 118 | res = app.get('/') 119 | assert 'test_key is: test value' in res 120 | assert 'test_key cleared' in res 121 | -------------------------------------------------------------------------------- /tests/test_syncdict.py: 
-------------------------------------------------------------------------------- 1 | from beaker.util import SyncDict, WeakValuedRegistry 2 | import random, time, weakref 3 | import threading 4 | 5 | class Value(object): 6 | values = {} 7 | 8 | def do_something(self, id): 9 | Value.values[id] = self 10 | 11 | def stop_doing_something(self, id): 12 | del Value.values[id] 13 | 14 | mutex = threading.Lock() 15 | 16 | def create(id): 17 | assert not Value.values, "values still remain" 18 | global totalcreates 19 | totalcreates += 1 20 | return Value() 21 | 22 | def threadtest(s, id): 23 | print("create thread %d starting" % id) 24 | 25 | global running 26 | global totalgets 27 | while running: 28 | try: 29 | value = s.get('test', lambda: create(id)) 30 | value.do_something(id) 31 | except Exception as e: 32 | print("Error", e) 33 | running = False 34 | break 35 | else: 36 | totalgets += 1 37 | time.sleep(random.random() * .01) 38 | value.stop_doing_something(id) 39 | del value 40 | time.sleep(random.random() * .01) 41 | 42 | def runtest(s): 43 | 44 | global values 45 | values = {} 46 | 47 | global totalcreates 48 | totalcreates = 0 49 | 50 | global totalgets 51 | totalgets = 0 52 | 53 | global running 54 | running = True 55 | 56 | threads = [] 57 | for id_ in range(1, 20): 58 | t = threading.Thread(target=threadtest, args=(s, id_)) 59 | t.start() 60 | threads.append(t) 61 | 62 | for i in range(0, 10): 63 | if not running: 64 | break 65 | time.sleep(1) 66 | 67 | failed = not running 68 | 69 | running = False 70 | 71 | for t in threads: 72 | t.join() 73 | 74 | assert not failed, "test failed" 75 | 76 | print("total object creates %d" % totalcreates) 77 | print("total object gets %d" % totalgets) 78 | 79 | 80 | def test_dict(): 81 | # normal dictionary test, where we will remove the value 82 | # periodically. the number of creates should be equal to 83 | # the number of removes plus one. 84 | print("\ntesting with normal dict") 85 | runtest(SyncDict()) 86 | 87 | 88 | def test_weakdict(): 89 | print("\ntesting with weak dict") 90 | runtest(WeakValuedRegistry()) 91 | -------------------------------------------------------------------------------- /tests/test_synchronizer.py: -------------------------------------------------------------------------------- 1 | from beaker.synchronization import * 2 | 3 | # TODO: spawn threads, test locking. 4 | 5 | 6 | def teardown_module(): 7 | import shutil 8 | shutil.rmtree('./cache', True) 9 | 10 | def test_reentrant_file(): 11 | sync1 = file_synchronizer('test', lock_dir='./cache') 12 | sync2 = file_synchronizer('test', lock_dir='./cache') 13 | sync1.acquire_write_lock() 14 | sync2.acquire_write_lock() 15 | sync2.release_write_lock() 16 | sync1.release_write_lock() 17 | 18 | def test_null(): 19 | sync = null_synchronizer() 20 | assert sync.acquire_write_lock() 21 | sync.release_write_lock() 22 | 23 | def test_mutex(): 24 | sync = mutex_synchronizer('someident') 25 | sync.acquire_write_lock() 26 | sync.release_write_lock() 27 | 28 | -------------------------------------------------------------------------------- /tests/test_unicode_cache_keys.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """If we try to use a character not in ascii range as a cache key, we get an 3 | unicodeencode error. 
See 4 | https://bitbucket.org/bbangert/beaker/issue/31/cached-function-decorators-break-when-some 5 | for more on this 6 | """ 7 | 8 | from beaker._compat import u_ 9 | from beaker.cache import CacheManager 10 | 11 | def eq_(a, b, msg=''): 12 | assert a == b, msg 13 | 14 | memory_cache = CacheManager(type='memory') 15 | 16 | @memory_cache.cache('foo') 17 | def foo(whatever): 18 | return whatever 19 | 20 | class bar(object): 21 | 22 | @memory_cache.cache('baz') 23 | def baz(self, qux): 24 | return qux 25 | 26 | @classmethod 27 | @memory_cache.cache('bar') 28 | def quux(cls, garply): 29 | return garply 30 | 31 | def test_A_unicode_encode_key_str(): 32 | eq_(foo('Espanol'), 'Espanol') 33 | eq_(foo(12334), 12334) 34 | eq_(foo(u_('Espanol')), u_('Espanol')) 35 | eq_(foo(u_('Español')), u_('Español')) 36 | b = bar() 37 | eq_(b.baz('Espanol'), 'Espanol') 38 | eq_(b.baz(12334), 12334) 39 | eq_(b.baz(u_('Espanol')), u_('Espanol')) 40 | eq_(b.baz(u_('Español')), u_('Español')) 41 | eq_(b.quux('Espanol'), 'Espanol') 42 | eq_(b.quux(12334), 12334) 43 | eq_(b.quux(u_('Espanol')), u_('Espanol')) 44 | eq_(b.quux(u_('Español')), u_('Español')) 45 | 46 | 47 | def test_B_replacing_non_ascii(): 48 | """we replace the offending character with other non ascii one. Since 49 | the function distinguishes between the two it should not return the 50 | past value 51 | """ 52 | assert foo(u_('Espaáol')) != u_('Español') 53 | eq_(foo(u_('Espaáol')), u_('Espaáol')) 54 | 55 | def test_C_more_unicode(): 56 | """We again test the same stuff but this time we use 57 | http://tools.ietf.org/html/draft-josefsson-idn-test-vectors-00#section-5 58 | as keys""" 59 | keys = [ 60 | # arabic (egyptian) 61 | u_("\u0644\u064a\u0647\u0645\u0627\u0628\u062a\u0643\u0644\u0645\u0648\u0634\u0639\u0631\u0628\u064a\u061f"), 62 | # Chinese (simplified) 63 | u_("\u4ed6\u4eec\u4e3a\u4ec0\u4e48\u4e0d\u8bf4\u4e2d\u6587"), 64 | # Chinese (traditional) 65 | u_("\u4ed6\u5011\u7232\u4ec0\u9ebd\u4e0d\u8aaa\u4e2d\u6587"), 66 | # czech 67 | u_("\u0050\u0072\u006f\u010d\u0070\u0072\u006f\u0073\u0074\u011b\u006e\u0065\u006d\u006c\u0075\u0076\u00ed\u010d\u0065\u0073\u006b\u0079"), 68 | # hebrew 69 | u_("\u05dc\u05de\u05d4\u05d4\u05dd\u05e4\u05e9\u05d5\u05d8\u05dc\u05d0\u05de\u05d3\u05d1\u05e8\u05d9\u05dd\u05e2\u05d1\u05e8\u05d9\u05ea"), 70 | # Hindi (Devanagari) 71 | u_("\u092f\u0939\u0932\u094b\u0917\u0939\u093f\u0928\u094d\u0926\u0940\u0915\u094d\u092f\u094b\u0902\u0928\u0939\u0940\u0902\u092c\u094b\u0932\u0938\u0915\u0924\u0947\u0939\u0948\u0902"), 72 | # Japanese (kanji and hiragana) 73 | u_("\u306a\u305c\u307f\u3093\u306a\u65e5\u672c\u8a9e\u3092\u8a71\u3057\u3066\u304f\u308c\u306a\u3044\u306e\u304b"), 74 | # Russian (Cyrillic) 75 | u_("\u043f\u043e\u0447\u0435\u043c\u0443\u0436\u0435\u043e\u043d\u0438\u043d\u0435\u0433\u043e\u0432\u043e\u0440\u044f\u0442\u043f\u043e\u0440\u0443\u0441\u0441\u043a\u0438"), 76 | # Spanish 77 | u_("\u0050\u006f\u0072\u0071\u0075\u00e9\u006e\u006f\u0070\u0075\u0065\u0064\u0065\u006e\u0073\u0069\u006d\u0070\u006c\u0065\u006d\u0065\u006e\u0074\u0065\u0068\u0061\u0062\u006c\u0061\u0072\u0065\u006e\u0045\u0073\u0070\u0061\u00f1\u006f\u006c"), 78 | # Vietnamese 79 | u_("\u0054\u1ea1\u0069\u0073\u0061\u006f\u0068\u1ecd\u006b\u0068\u00f4\u006e\u0067\u0074\u0068\u1ec3\u0063\u0068\u1ec9\u006e\u00f3\u0069\u0074\u0069\u1ebf\u006e\u0067\u0056\u0069\u1ec7\u0074"), 80 | # Japanese 81 | u_("\u0033\u5e74\u0042\u7d44\u91d1\u516b\u5148\u751f"), 82 | # Japanese 83 | 
u_("\u5b89\u5ba4\u5948\u7f8e\u6075\u002d\u0077\u0069\u0074\u0068\u002d\u0053\u0055\u0050\u0045\u0052\u002d\u004d\u004f\u004e\u004b\u0045\u0059\u0053"), 84 | # Japanese 85 | u_("\u0048\u0065\u006c\u006c\u006f\u002d\u0041\u006e\u006f\u0074\u0068\u0065\u0072\u002d\u0057\u0061\u0079\u002d\u305d\u308c\u305e\u308c\u306e\u5834\u6240"), 86 | # Japanese 87 | u_("\u3072\u3068\u3064\u5c4b\u6839\u306e\u4e0b\u0032"), 88 | # Japanese 89 | u_("\u004d\u0061\u006a\u0069\u3067\u004b\u006f\u0069\u3059\u308b\u0035\u79d2\u524d"), 90 | # Japanese 91 | u_("\u30d1\u30d5\u30a3\u30fc\u0064\u0065\u30eb\u30f3\u30d0"), 92 | # Japanese 93 | u_("\u305d\u306e\u30b9\u30d4\u30fc\u30c9\u3067"), 94 | # greek 95 | u_("\u03b5\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac"), 96 | # Maltese (Malti) 97 | u_("\u0062\u006f\u006e\u0121\u0075\u0073\u0061\u0127\u0127\u0061"), 98 | # Russian (Cyrillic) 99 | u_("\u043f\u043e\u0447\u0435\u043c\u0443\u0436\u0435\u043e\u043d\u0438\u043d\u0435\u0433\u043e\u0432\u043e\u0440\u044f\u0442\u043f\u043e\u0440\u0443\u0441\u0441\u043a\u0438") 100 | ] 101 | for i in keys: 102 | eq_(foo(i),i) 103 | 104 | def test_D_invalidate(): 105 | """Invalidate cache""" 106 | memory_cache.invalidate(foo) 107 | eq_(foo('Espanol'), 'Espanol') 108 | --------------------------------------------------------------------------------