├── memcachepool ├── __init__.py ├── tests │ ├── settings.py │ ├── __init__.py │ └── test_cache.py ├── client.py ├── pool.py └── cache.py ├── MANIFEST.in ├── CHANGES.rst ├── LICENCE ├── setup.py └── README.rst /memcachepool/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include CHANGES.rst 3 | -------------------------------------------------------------------------------- /memcachepool/tests/settings.py: -------------------------------------------------------------------------------- 1 | ok = 1 2 | SECRET_KEY = 'secret' 3 | -------------------------------------------------------------------------------- /memcachepool/tests/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | def setUp(): 5 | os.environ['DJANGO_SETTINGS_MODULE'] = 'memcachepool.tests.settings' 6 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | 2013-12-13 - 0.4 2 | ################ 3 | 4 | - Use pickle.HIGHEST_PROTOCOL #8 5 | - 6 | ..... 7 | 8 | 2012-10-05 - 0.3.1 9 | ################## 10 | 11 | - fixed cache.clear 12 | - make sure timeout=0 is treated as infinite 13 | 14 | 15 | 2012-10-05 - 0.3 16 | ################ 17 | 18 | - now based on umemcache 0.5 19 | - added support for MAX_ITEM_SIZE 20 | 21 | 2012-09-07 - 0.2 22 | ################ 23 | 24 | - Added the licence info 25 | 26 | 2012-08-28 - 0.1 27 | ################ 28 | 29 | - Initial release 30 | -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012, Mozilla Corporation 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup, find_packages 3 | 4 | here = os.path.abspath(os.path.dirname(__file__)) 5 | 6 | with open(os.path.join(here, 'README.rst')) as f: 7 | README = f.read() 8 | 9 | with open(os.path.join(here, 'CHANGES.rst')) as f: 10 | CHANGES = f.read() 11 | 12 | 13 | requires = ['Django', 'umemcache'] 14 | test_requires = ['nose'] 15 | 16 | 17 | setup(name='django-memcached-pool', 18 | version='0.5', 19 | description='A Memcached Pool for Django', 20 | long_description=README + '\n\n' + CHANGES, 21 | classifiers=[ 22 | "Programming Language :: Python", 23 | "Framework :: Django", 24 | "Topic :: Internet :: WWW/HTTP", 25 | "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", 26 | "License :: OSI Approved :: Apache Software License", 27 | ], 28 | author='Mozilla Services', 29 | author_email='services-dev@mozilla.org', 30 | url='https://github.com/mozilla/django-memcached-pool', 31 | keywords='django memcached pool', 32 | packages=find_packages(), 33 | zip_safe=False, 34 | install_requires=requires, 35 | tests_require=test_requires, 36 | test_suite="memcachepool.tests") 37 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | django-memcached-pool 2 | ===================== 3 | 4 | An efficient, fast Django Memcached backend with a pool of connectors, based on 5 | ultramemcache. 6 | 7 | See https://github.com/esnme/ultramemcache 8 | 9 | Each connection added to the pool stays connected to Memcached or Membase, 10 | drastically limiting the number of reconnections and open sockets your 11 | application will use under high load. 12 | 13 | If you configure more than one Memcached server, each new connection 14 | will pick one of them. 15 | 16 | Every time a socket timeout occurs on a server, it is blacklisted so that 17 | new connections avoid picking it for a while. 18 | 19 | To use this backend, make sure the package is installed in your environment, 20 | then use ``memcachepool.cache.UMemcacheCache`` as the backend in your settings. 21 | 22 | **Also, make sure you use umemcache >= 1.5** 23 | 24 | Here's an example:: 25 | 26 | 27 | CACHES = { 28 | 'default': { 29 | 'BACKEND': 'memcachepool.cache.UMemcacheCache', 30 | 'LOCATION': '127.0.0.1:11211', 31 | 'OPTIONS': { 32 | 'MAX_POOL_SIZE': 100, 33 | 'BLACKLIST_TIME': 20, 34 | 'SOCKET_TIMEOUT': 5, 35 | 'MAX_ITEM_SIZE': 1000*100, 36 | } 37 | } 38 | } 39 | 40 | 41 | Options: 42 | 43 | - **MAX_POOL_SIZE** -- The maximum number of connectors in the pool. Default: 35. 44 | - **BLACKLIST_TIME** -- The time in seconds a server stays in the blacklist. Default: 60. 45 | - **SOCKET_TIMEOUT** -- The time in seconds for the socket timeout. Default: 4. 46 | - **MAX_ITEM_SIZE** -- The maximum size in bytes for an item in Memcached. Default: 1000 * 1000.
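Once the backend is configured, it is used through Django's regular cache API. The snippet below is a minimal sketch (key names are illustrative) assuming the ``CACHES`` settings above; complex values are pickled transparently while integers are stored natively::

    from django.core.cache import cache

    cache.set('greeting', {'msg': 'hello'}, 60)   # pickled transparently
    assert cache.get('greeting') == {'msg': 'hello'}

    cache.set('hits', 1)                          # ints are stored natively
    cache.incr('hits')
    assert cache.get('hits') == 2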
47 | 48 | -------------------------------------------------------------------------------- /memcachepool/tests/test_cache.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import time 3 | from unittest import TestCase 4 | 5 | 6 | class TestCache(TestCase): 7 | 8 | def test_pool(self): 9 | from memcachepool.cache import UMemcacheCache 10 | 11 | # creating the cache class 12 | cache = UMemcacheCache('127.0.0.1:11211', {}) 13 | 14 | # simple calls 15 | cache.set('a', '1') 16 | self.assertEqual(cache.get('a'), '1') 17 | 18 | # should support any type and deal with serialization 19 | # like python-memcached does 20 | cache.set('a', 1) 21 | self.assertEqual(cache.get('a'), 1) 22 | cache.delete('a') 23 | self.assertEqual(cache.get('a'), None) 24 | 25 | def test_many(self): 26 | # make sure all the 'many' APIs work 27 | from memcachepool.cache import UMemcacheCache 28 | 29 | # creating the cache class 30 | cache = UMemcacheCache('127.0.0.1:11211', {}) 31 | 32 | cache.set_many({'a': 1, 'b': 2}) 33 | 34 | res = cache.get_many(['a', 'b']).values() 35 | self.assertTrue(1 in res) 36 | self.assertTrue(2 in res) 37 | 38 | cache.delete_many(['a', 'b']) 39 | self.assertEqual(cache.get_many(['a', 'b']), {}) 40 | 41 | def test_incr_decr(self): 42 | # Testing incr and decr operations 43 | from memcachepool.cache import UMemcacheCache 44 | 45 | # creating the cache class 46 | cache = UMemcacheCache('127.0.0.1:11211', {}) 47 | cache.set('a', 1) 48 | cache.incr('a', 1) 49 | self.assertEquals(cache.get('a'), 2) 50 | cache.decr('a', 1) 51 | self.assertEquals(cache.get('a'), 1) 52 | 53 | def test_types(self): 54 | # Testing if correct types are returned 55 | from memcachepool.cache import UMemcacheCache 56 | 57 | # creating the cache class 58 | cache = UMemcacheCache('127.0.0.1:11211', {}) 59 | cache.set('a', int(1)) 60 | self.assertEquals(cache.get('a'), 1) 61 | self.assertTrue(isinstance(cache.get('a'), int)) 62 | 63 | cache.set('a', long(1)) 64 | self.assertEquals(cache.get('a'), 1) 65 | self.assertTrue(isinstance(cache.get('a'), long)) 66 | 67 | def test_loadbalance(self): 68 | from memcachepool.cache import UMemcacheCache 69 | 70 | # creating the cache class with two backends (one is off) 71 | params = {'SOCKET_TIMEOUT': 1, 'BLACKLIST_TIME': 1} 72 | cache = UMemcacheCache('127.0.0.1:11214;127.0.0.2:11213', params) 73 | 74 | # the load balancer should blacklist both IPs. 75 | # and return an error 76 | self.assertRaises(socket.error, cache.set, 'a', '1') 77 | self.assertTrue(len(cache._blacklist), 2) 78 | 79 | # wait for two seconds. 
80 | time.sleep(1.1) 81 | 82 | # calling _pick_server should purge the blacklist 83 | cache._pick_server() 84 | self.assertEqual(len(cache._blacklist), 0) 85 | -------------------------------------------------------------------------------- /memcachepool/client.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import time 3 | from errno import EISCONN, EINVAL 4 | from functools import wraps 5 | 6 | from umemcache import Client as OriginalClient 7 | from umemcache import MemcachedError 8 | 9 | 10 | _RETRY = ('set', 'get', 'gets', 'get_multi', 'gets_multi', 11 | 'add', 'replace', 'append', 'prepend', 'delete', 12 | 'cas', 'incr', 'decr', 'stats', 'flush_all', 13 | 'version') 14 | _ERRORS = (IOError, RuntimeError, MemcachedError, socket.error) 15 | 16 | 17 | class Client(object): 18 | """On connection errors, tries to reconnect 19 | """ 20 | def __init__(self, address, max_item_size=None, max_connect_retries=5, 21 | reconnect_delay=.5): 22 | self.address = address 23 | self.max_item_size = max_item_size 24 | self._client = None 25 | self.funcs = [] 26 | self._create_client() 27 | self.max_connect_retries = max_connect_retries 28 | self.reconnect_delay = reconnect_delay 29 | 30 | def _create_connector(self): 31 | if self.max_item_size is not None: 32 | self._client = OriginalClient(self.address, self.max_item_size) 33 | else: 34 | self._client = OriginalClient(self.address) 35 | 36 | self.funcs = [func for func in dir(self._client) 37 | if not func.startswith('_')] 38 | 39 | def _create_client(self): 40 | reconnect = self._client is not None 41 | 42 | if reconnect: 43 | try: 44 | self._client.close() 45 | except Exception: 46 | pass 47 | 48 | self._create_connector() 49 | 50 | if reconnect: 51 | retries = 0 52 | delay = self.reconnect_delay 53 | while retries < self.max_connect_retries: 54 | try: 55 | self._client.connect() 56 | except socket.error, exc: 57 | if exc.errno == EISCONN: 58 | return # we're good 59 | if exc.errno == EINVAL: 60 | # we're doomed, retry 61 | self._create_connector() 62 | 63 | time.sleep(delay) 64 | retries += 1 65 | delay *= 2 # growing the delay 66 | 67 | raise exc 68 | 69 | def _with_retry(self, func): 70 | @wraps(func) 71 | def __with_retry(*args, **kw): 72 | retries = 0 73 | delay = self.reconnect_delay 74 | current_func = func 75 | 76 | while retries < self.max_connect_retries: 77 | try: 78 | return current_func(*args, **kw) 79 | except _ERRORS, exc: 80 | self._create_client() 81 | current_func = getattr(self._client, func.__name__) 82 | time.sleep(delay) 83 | retries += 1 84 | delay *= 3 # growing the delay 85 | 86 | raise exc 87 | return __with_retry 88 | 89 | def __getattr__(self, name): 90 | if not name in self.funcs: 91 | return self.__dict__[name] 92 | 93 | original = getattr(self._client, name) 94 | 95 | if name in _RETRY: 96 | return self._with_retry(original) 97 | 98 | return original 99 | -------------------------------------------------------------------------------- /memcachepool/pool.py: -------------------------------------------------------------------------------- 1 | import Queue 2 | import time 3 | import contextlib 4 | import sys 5 | 6 | # Sentinel used to mark an empty slot in the MCClientPool queue. 7 | # Using sys.maxint as the timestamp ensures that empty slots will always 8 | # sort *after* live connection objects in the queue. 
9 | EMPTY_SLOT = (sys.maxint, None) 10 | 11 | 12 | class ClientPool(object): 13 | 14 | def __init__(self, factory, maxsize=None, timeout=60, 15 | wait_for_connection=None): 16 | self.factory = factory 17 | self.maxsize = maxsize 18 | self.timeout = timeout 19 | self.clients = Queue.PriorityQueue(maxsize) 20 | self.wait_for_connection = wait_for_connection 21 | # If there is a maxsize, prime the queue with empty slots. 22 | if maxsize is not None: 23 | for _ in xrange(maxsize): 24 | self.clients.put(EMPTY_SLOT) 25 | 26 | @contextlib.contextmanager 27 | def reserve(self): 28 | """Context-manager to obtain a Client object from the pool.""" 29 | ts, client = self._checkout_connection() 30 | try: 31 | yield client 32 | finally: 33 | self._checkin_connection(ts, client) 34 | 35 | def _checkout_connection(self): 36 | # If there's no maxsize, no need to block waiting for a connection. 37 | blocking = self.maxsize is not None 38 | # Loop until we get a non-stale connection, or we create a new one. 39 | while True: 40 | try: 41 | ts, client = self.clients.get(blocking, 42 | self.wait_for_connection) 43 | except Queue.Empty: 44 | if blocking: 45 | #timeout 46 | raise Exception("No connections available in the pool") 47 | else: 48 | # No maxsize and no free connections, create a new one. 49 | # XXX TODO: we should be using a monotonic clock here. 50 | now = int(time.time()) 51 | return now, self.factory() 52 | else: 53 | now = int(time.time()) 54 | # If we got an empty slot placeholder, create a new connection. 55 | if client is None: 56 | try: 57 | return now, self.factory() 58 | except Exception, e: 59 | if self.maxsize is not None: 60 | # return slot to queue 61 | self.clients.put(EMPTY_SLOT) 62 | raise e 63 | # If the connection is not stale, go ahead and use it. 64 | if ts + self.timeout > now: 65 | return ts, client 66 | # Otherwise, the connection is stale. 67 | # Close it, push an empty slot onto the queue, and retry. 68 | client.disconnect() 69 | self.clients.put(EMPTY_SLOT) 70 | continue 71 | 72 | def _checkin_connection(self, ts, client): 73 | """Return a connection to the pool.""" 74 | # If the connection is now stale, don't return it to the pool. 75 | # Push an empty slot instead so that it will be refreshed when needed. 
76 | now = int(time.time()) 77 | if ts + self.timeout > now: 78 | self.clients.put((ts, client)) 79 | else: 80 | if self.maxsize is not None: 81 | self.clients.put(EMPTY_SLOT) 82 | -------------------------------------------------------------------------------- /memcachepool/cache.py: -------------------------------------------------------------------------------- 1 | try: 2 | import cPickle as pickle # NOQA 3 | except ImportError: 4 | import pickle # NOQA 5 | 6 | import errno 7 | import socket 8 | import time 9 | 10 | from django.core.cache.backends.memcached import MemcachedCache 11 | from memcachepool.pool import ClientPool 12 | 13 | 14 | DEFAULT_ITEM_SIZE = 1000 * 1000 15 | 16 | 17 | # XXX not sure if keeping the base BaseMemcachedCache class has anymore value 18 | class UMemcacheCache(MemcachedCache): 19 | "An implementation of a cache binding using python-memcached" 20 | 21 | _FLAG_SERIALIZED = 1 22 | _FLAG_INT = 1 << 1 23 | _FLAG_LONG = 1 << 2 24 | 25 | def __init__(self, server, params): 26 | from memcachepool import client 27 | kls = super(MemcachedCache, self) 28 | kls.__init__(server, params, library=client, 29 | value_not_found_exception=ValueError) 30 | # see how to pass the pool value 31 | self.maxsize = int(params.get('MAX_POOL_SIZE', 35)) 32 | self.blacklist_time = int(params.get('BLACKLIST_TIME', 60)) 33 | self.socktimeout = int(params.get('SOCKET_TIMEOUT', 4)) 34 | self.max_item_size = long(params.get('MAX_ITEM_SIZE', 35 | DEFAULT_ITEM_SIZE)) 36 | self._pool = ClientPool(self._get_client, maxsize=self.maxsize, 37 | wait_for_connection=self.socktimeout) 38 | self._blacklist = {} 39 | self.retries = int(params.get('MAX_RETRIES', 3)) 40 | self._pick_index = 0 41 | 42 | def call(self, func, *args, **kwargs): 43 | retries = 0 44 | while retries < self.retries: 45 | with self._pool.reserve() as conn: 46 | try: 47 | return getattr(conn, func)(*args, **kwargs) 48 | except Exception, exc: 49 | # log 50 | retries += 1 51 | raise exc 52 | 53 | # XXX using python-memcached style pickling 54 | # but maybe we could use something else like 55 | # json 56 | # 57 | # at least this makes it compatible with 58 | # existing data 59 | def serialize(self, data): 60 | return pickle.dumps(data, pickle.HIGHEST_PROTOCOL) 61 | 62 | def unserialize(self, data): 63 | return pickle.loads(data) 64 | 65 | def _get_memcache_timeout(self, timeout): 66 | if timeout == 0: 67 | return timeout 68 | return super(UMemcacheCache, self)._get_memcache_timeout(timeout) 69 | 70 | def _pick_server(self): 71 | # update the blacklist 72 | for server, age in self._blacklist.items(): 73 | if time.time() - age > self.blacklist_time: 74 | del self._blacklist[server] 75 | 76 | # build the list of available servers 77 | choices = list(set(self._servers) ^ set(self._blacklist.keys())) 78 | 79 | if not choices: 80 | return None 81 | 82 | if self._pick_index >= len(choices): 83 | self._pick_index = 0 84 | 85 | choice = choices[self._pick_index] 86 | self._pick_index += 1 87 | return choice 88 | 89 | def _blacklist_server(self, server): 90 | self._blacklist[server] = time.time() 91 | 92 | def _get_client(self): 93 | server = self._pick_server() 94 | last_error = None 95 | 96 | def create_client(server): 97 | cli = self._lib.Client(server, max_item_size=self.max_item_size) 98 | cli.sock.settimeout(self.socktimeout) 99 | return cli 100 | 101 | while server is not None: 102 | cli = create_client(server) 103 | try: 104 | cli.connect() 105 | return cli 106 | except (socket.timeout, socket.error), e: 107 | if not isinstance(e, 
socket.timeout): 108 | if e.errno != errno.ECONNREFUSED: 109 | # a case we do not handle yet 110 | raise 111 | 112 | # well that's embarrassing, let's blacklist this one 113 | # and try again 114 | self._blacklist_server(server) 115 | server = self._pick_server() 116 | last_error = e 117 | 118 | if last_error is not None: 119 | raise last_error 120 | else: 121 | raise socket.timeout('No server left in the pool') 122 | 123 | def _flag_for_value(self, value): 124 | if isinstance(value, int): 125 | return self._FLAG_INT 126 | elif isinstance(value, long): 127 | return self._FLAG_LONG 128 | return self._FLAG_SERIALIZED 129 | 130 | def _value_for_flag(self, value, flag): 131 | if flag == self._FLAG_INT: 132 | return int(value) 133 | elif flag == self._FLAG_LONG: 134 | return long(value) 135 | return self.unserialize(value) 136 | 137 | def add(self, key, value, timeout=0, version=None): 138 | flag = self._flag_for_value(value) 139 | if flag == self._FLAG_SERIALIZED: 140 | value = self.serialize(value) 141 | else: 142 | value = '%d' % value 143 | 144 | key = self.make_key(key, version=version) 145 | 146 | return self.call('add', key, value, self._get_memcache_timeout(timeout), 147 | flag) 148 | 149 | def get(self, key, default=None, version=None): 150 | key = self.make_key(key, version=version) 151 | val = self.call('get', key) 152 | 153 | if val is None: 154 | return default 155 | 156 | return self._value_for_flag(value=val[0], flag=val[1]) 157 | 158 | def set(self, key, value, timeout=0, version=None): 159 | flag = self._flag_for_value(value) 160 | if flag == self._FLAG_SERIALIZED: 161 | value = self.serialize(value) 162 | else: 163 | value = '%d' % value 164 | key = self.make_key(key, version=version) 165 | self.call('set', key, value, self._get_memcache_timeout(timeout), flag) 166 | 167 | def delete(self, key, version=None): 168 | key = self.make_key(key, version=version) 169 | self.call('delete', key) 170 | 171 | def get_many(self, keys, version=None): 172 | if not keys: 173 | return {} 174 | 175 | new_keys = map(lambda x: self.make_key(x, version=version), keys) 176 | 177 | ret = {} 178 | 179 | for key in new_keys: 180 | res = self.call('get', key) 181 | if res is None: 182 | continue 183 | ret[key] = res 184 | 185 | if ret: 186 | res = {} 187 | m = dict(zip(new_keys, keys)) 188 | 189 | for k, v in ret.items(): 190 | res[m[k]] = self._value_for_flag(value=v[0], flag=v[1]) 191 | 192 | return res 193 | 194 | return ret 195 | 196 | def close(self, **kwargs): 197 | # XXX none of your business Django 198 | pass 199 | 200 | def incr(self, key, delta=1, version=None): 201 | key = self.make_key(key, version=version) 202 | try: 203 | val = self.call('incr', key, delta) 204 | 205 | # python-memcache responds to incr on non-existent keys by 206 | # raising a ValueError, pylibmc by raising a pylibmc.NotFound 207 | # and Cmemcache returns None. In all cases, 208 | # we should raise a ValueError though. 209 | except self.LibraryValueNotFoundException: 210 | val = None 211 | if val is None: 212 | raise ValueError("Key '%s' not found" % key) 213 | return val 214 | 215 | def decr(self, key, delta=1, version=None): 216 | key = self.make_key(key, version=version) 217 | try: 218 | val = self.call('decr', key, delta) 219 | 220 | # python-memcache responds to decr on non-existent keys by 221 | # raising a ValueError, pylibmc by raising a pylibmc.NotFound 222 | # and Cmemcache returns None. In all cases, 223 | # we should raise a ValueError though.
224 | except self.LibraryValueNotFoundException: 225 | val = None 226 | if val is None: 227 | raise ValueError("Key '%s' not found" % key) 228 | return val 229 | 230 | def set_many(self, data, timeout=0, version=None): 231 | safe_data = {} 232 | for key, value in data.items(): 233 | key = self.make_key(key, version=version) 234 | flag = self._flag_for_value(value) 235 | if flag == self._FLAG_SERIALIZED: 236 | value = self.serialize(value) 237 | else: 238 | value = '%d' % value 239 | safe_data[key] = (value, flag)  # keep each entry's own flag 240 | 241 | for key, (value, flag) in safe_data.items(): 242 | self.call('set', key, value, self._get_memcache_timeout(timeout), 243 | flag) 244 | 245 | def delete_many(self, keys, version=None): 246 | for key in keys: 247 | self.call('delete', self.make_key(key, version=version)) 248 | 249 | def clear(self): 250 | self.call('flush_all') 251 | --------------------------------------------------------------------------------
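The pool and client modules above can also be exercised outside Django. The following is a minimal standalone sketch, assuming a memcached server is listening on 127.0.0.1:11211; at this level the raw umemcache client is used directly, so values are plain strings and no pickling is applied::

    from memcachepool.client import Client
    from memcachepool.pool import ClientPool

    def factory():
        # the umemcache-backed client must be connected before use
        cli = Client('127.0.0.1:11211')
        cli.connect()
        return cli

    pool = ClientPool(factory, maxsize=4)

    with pool.reserve() as conn:          # borrow a connection from the pool
        conn.set('raw-key', 'raw-value')
        print conn.get('raw-key')         # umemcache returns (value, flags)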