├── .gitattributes ├── .gitignore ├── .pyup.yml ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.md ├── docker-compose.yml ├── easy_cache ├── __init__.py ├── _version.py ├── abc.py ├── compat.py ├── contrib │ ├── __init__.py │ └── redis_cache.py ├── core.py ├── decorators.py └── utils.py ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── benchmarks.py ├── conf.py ├── tests_basic.py └── tests_cache_clients.py ├── tox.ini └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | easy_cache/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | MANIFEST 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .cache 41 | nosetests.xml 42 | coverage.xml 43 | 44 | # Translations 45 | *.mo 46 | *.pot 47 | 48 | # Django stuff: 49 | *.log 50 | 51 | # Sphinx documentation 52 | docs/_build/ 53 | 54 | # PyBuilder 55 | target/ 56 | 57 | # idea 58 | .idea/ 59 | .python-version 60 | .vscode 61 | .pytest_cache -------------------------------------------------------------------------------- /.pyup.yml: -------------------------------------------------------------------------------- 1 | # autogenerated pyup.io config file 2 | # see https://pyup.io/docs/configuration/ for all available options 3 | 4 | update: insecure 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "3.5" 4 | - "3.6" 5 | services: 6 | - memcached 7 | - redis-server 8 | install: 9 | - pip install Django==$DJANGO_VERSION 10 | - pip install -e .[tests] 11 | env: 12 | matrix: 13 | - DJANGO_VERSION=1.8 14 | - DJANGO_VERSION=1.11 15 | - DJANGO_VERSION=2.0.0 16 | global: 17 | - EASY_CACHE_REDIS_HOST=127.0.0.1:6379 18 | - EASY_CACHE_MEMCACHED_HOST=127.0.0.1:11211 19 | 20 | script: pytest -vv 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015-2017 Churkin Oleg 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of 
the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include versioneer.py 3 | include easy_cache/_version.py 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Easy caching decorators 2 | 3 | This package is intended to simplify the caching and invalidation process in Python-based (primarily web) applications. It can cache the execution results of functions; **instance**, **class** and **static** methods; and properties. Cache keys may be constructed in various ways and may depend on any number of parameters. 4 | 5 | The package supports tag-based cache invalidation and works best with Django, however any other framework can be used – see the examples below. 6 | 7 | The main idea of this package: you don't need to touch any existing function code to cache its execution results. 8 | 9 | ## Requirements 10 | 11 | The library was tested in the following environments: 12 | 13 | * Python 3.7, 3.8, 3.9, 3.10 14 | * Django >=2.0.0 15 | 16 | Feel free to try it in yours, but it's not guaranteed to work. Submit an issue if you think it should. 17 | 18 | ## Installation 19 | 20 | ```shell 21 | pip install easy_cache 22 | ``` 23 | 24 | ## Introduction 25 | 26 | ### Different ways to cache something 27 | 28 | Imagine you have a time-consuming function and you need to cache its execution results; the classic way to achieve this looks like the following: 29 | 30 | ```python 31 | # classic way 32 | from django.core.cache import cache 33 | 34 | def time_consuming_operation(n): 35 | """Calculate the sum of numbers from 1 to the provided n""" 36 | cache_key = 'time_consuming_operation_{}'.format(n) 37 | result = cache.get(cache_key, None) 38 | 39 | if result is None: 40 | # not found in cache 41 | result = sum(range(n + 1)) 42 | # cache result for one hour 43 | cache.set(cache_key, result, 3600) 44 | 45 | return result 46 | 47 | def invalidate_cache(n): 48 | cache.delete('time_consuming_operation_{}'.format(n)) 49 | ``` 50 | 51 | Well, we had to add annoying boilerplate code to achieve this. 52 | Now let's take a look at how `easy_cache` avoids the problem and simplifies the code: 53 | 54 | ```python 55 | # easy way 56 | from easy_cache import ecached 57 | 58 | @ecached('time_consuming_operation_{n}', 3600) 59 | def time_consuming_operation(n): 60 | return sum(range(n + 1)) 61 | 62 | def invalidate_cache(n): 63 | time_consuming_operation.invalidate_cache_by_key(n) 64 | ``` 65 | 66 | As you can see, the function body stays clean. 67 | The heart of the package is two decorators with similar parameters: 68 | 69 | ### ecached 70 | 71 | Should be used to decorate any callable and cache the returned result.
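For example, a minimal usage sketch (the function name `get_user_stats`, the key template, timeout and tag are illustrative only; each parameter is described below):

```python
from easy_cache import ecached

# cache the result per user id for 10 minutes and tag it for group invalidation
@ecached('user_stats:{user_id}', timeout=600, tags=['user_stats'])
def get_user_stats(user_id):
    ...

# drop one cached entry, or everything tagged with 'user_stats'
get_user_stats.invalidate_cache_by_key(user_id=42)
get_user_stats.invalidate_cache_by_tags('user_stats')
```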
72 | 73 | Parameters: 74 | 75 | * `cache_key` – cache key generator, the default value is `None`, so the key will be composed automatically based on the function name, namespace and passed parameters. The following types are also supported: 76 | * **string** – may contain [Python advanced string formatting syntax](https://docs.python.org/3/library/string.html#formatstrings); the given value will be formatted with a dict of the parameters passed to the decorated function, see the examples below. 77 | * **sequence of strings** – each string must be a function parameter name. 78 | * **callable** – used to generate the cache key: the decorated function's parameters will be passed to this callable and the returned value will be used as the cache key. One additional signature is also available: `callable(meta)`, where `meta` is a dict-like object with some additional attributes – see below. 79 | * `timeout` – the value will be cached with the provided timeout, normally a number of seconds, although this depends on the cache backend type. The default is `DEFAULT_TIMEOUT` – an internal constant meaning that no value is passed to the cache backend, so the backend decides which timeout to use. A callable is also supported. 80 | * `tags` – sequence of strings or callable. Should provide or return a list of tags added to the cached value, so the cache may be invalidated later by any tag name. Tags may use advanced string formatting syntax. See the `cache_key` docs and examples for more details. 81 | * `prefix` – this parameter works both as a regular tag and as a cache key prefix; as usual, advanced string formatting and callables are supported here. 82 | * `cache_alias` – cache backend alias name, it can also be a [Django cache backend alias name](https://docs.djangoproject.com/en/1.10/ref/settings/#std:setting-CACHES). 83 | * `cache_instance` – a cache backend instance may be provided directly via this parameter. 84 | 85 | ### ecached_property 86 | 87 | Should be used to create so-called cached properties; its signature is exactly the same as for `ecached`. 88 | 89 | ## Simple examples 90 | 91 | Code examples are the best way to show the power of this package. 92 | 93 | ### Decorators can be simply used with default parameters only 94 | 95 | ```python 96 | from easy_cache import ecached, create_cache_key 97 | 98 | # default parameters 99 | # cache key will be generated automatically: 100 | # 101 | # <__module__>.<__class__>.<function_name> + function parameters converted to strings, 102 | # 103 | # so be careful when using complex objects, it's 104 | # better to write a custom cache key generator in such cases.
105 | # 106 | # timeout will be default for specified cache backend 107 | # "default" cache backend will be used if you use Django 108 | @ecached() 109 | def time_consuming_operation(*args, **kwargs): 110 | pass 111 | 112 | # simple static cache key and cache timeout 100 seconds 113 | @ecached('time_consuming_operation', 100) 114 | def time_consuming_operation(): 115 | pass 116 | 117 | # cache key with advanced string formatting syntax 118 | @ecached('my_key:{b}:{d}:{c}') 119 | def time_consuming_operation(a, b, c=100, d='foo'): 120 | pass 121 | 122 | # or 123 | @ecached('key:{kwargs[param1]}:{kwargs[param2]}:{args[0]}') 124 | def time_consuming_operation(*args, **kwargs): 125 | pass 126 | 127 | # use specific cache alias, see "caches framework" below 128 | from functools import partial 129 | 130 | memcached = partial(ecached, cache_alias='memcached') 131 | 132 | # equivalent to cache_key='{a}:{b}' 133 | @memcached(['a', 'b'], timeout=600) 134 | def time_consuming_operation(a, b, c='default'): 135 | pass 136 | ``` 137 | 138 | ### Using custom cache key generators 139 | 140 | ```python 141 | # working with parameters provided to cached function 142 | # cache key generator must have the same signature as decorated function 143 | from easy_cache import create_cache_key 144 | 145 | def custom_cache_key(self, a, b, c, d): 146 | return create_cache_key(self.id, a, d) 147 | 148 | # working with `meta` object 149 | def custom_cache_key_meta(meta): 150 | return '{}:{}:{}'.format(meta['self'].id, meta['a'], meta['d']) 151 | 152 | # or equivalent 153 | from easy_cache import meta_accepted 154 | 155 | @meta_accepted 156 | def custom_cache_key_meta(parameter_with_any_name): 157 | meta = parameter_with_any_name 158 | return '{}:{}:{}'.format(meta['self'].id, meta['a'], meta['d']) 159 | 160 | 161 | class A(object): 162 | id = 1 163 | 164 | @ecached(custom_cache_key) 165 | def time_consuming_operation(self, a, b, c=10, d=20): 166 | ... 167 | 168 | @ecached(custom_cache_key_meta) 169 | def time_consuming_operation(self, a, b, c=10, d=20): 170 | ... 171 | ``` 172 | 173 | ### How to cache `staticmethod` and `classmethod` correctly 174 | 175 | ```python 176 | # ecached decorator always comes topmost 177 | class B(object): 178 | 179 | # cache only for each different year 180 | @ecached(lambda start_date: 'get_list:{}'.format(start_date.year)) 181 | @staticmethod 182 | def get_list_by_date(start_date): 183 | ... 184 | 185 | CONST = 'abc' 186 | 187 | @ecached('info_cache:{cls.CONST}', 3600, cache_alias='redis_cache') 188 | @classmethod 189 | def get_info(cls): 190 | ... 191 | ``` 192 | 193 | ### MetaCallable object description 194 | 195 | Meta object has the following parameters: 196 | 197 | * `args` – tuple with positional arguments provided to decorated function 198 | * `kwargs` – dictionary with keyword arguments provided to decorated function 199 | * `returned_value` – value returned from decorated function, available only when meta object is handled in `tags` or `prefix` generators. You have to check `has_returned_value` property before using this parameter: 200 | 201 | ```python 202 | def generate_cache_key(meta): 203 | if meta.has_returned_value: 204 | # ... do something with meta.returned_value ... 205 | ``` 206 | 207 | * `call_args` - dictionary with all positional and keyword arguments provided 208 | to decorated function, you may also access them via `__getitem__` dict interface, e. g. `meta['param1']`. 
209 | * `function` - decorated callable 210 | * `scope` - object to which decorated callable is attached, `None` otherwise. Usually it's an instance or a class. 211 | 212 | ### Tags invalidation, refresh and cached properties 213 | 214 | Tags-based cache invalidation allows you to invalidate several cache keys at once. 215 | 216 | Imagine you created a web-based book store and your users can mark a book as liked, so you need to maintain a list of liked books for every user but, an information about a book may contain a lot of different data, e.g. authors names, rating, availability in stock, some data from external services and so on. 217 | 218 | Some of this information can be calculated on runtime only so you decided to cache the list of liked books. 219 | 220 | But what if a book title was updated and we have to find all cache keys where this book is stored and invalidate them. Such task may be pretty complex to complete, however if you tagged all the necessary cache keys with a specific tag you will just need to invalidate the tag only and related cache keys will be invalidated "automatically". 221 | 222 | Here are more complex examples introducing Django models and effective tags usage. 223 | Check code comments and doc-strings for detailed description. 224 | 225 | ```python 226 | from django.db import models 227 | from easy_cache import ecached, ecached_property, create_cache_key 228 | 229 | 230 | class Book(models.Model): 231 | title = models.CharField(max_length=250) 232 | 233 | def __unicode__(self): 234 | return self.title 235 | 236 | 237 | class User(models.Model): 238 | name = models.CharField(max_length=100) 239 | state = models.CharField( 240 | max_length=15, 241 | choices=(('active', 'active'), ('deleted', 'deleted')), 242 | ) 243 | friends = models.ManyToManyField('self', symmetrical=True) 244 | favorite_books = models.ManyToManyField('Book') 245 | 246 | def __unicode__(self): 247 | return self.name 248 | 249 | @ecached('users_by_state:{state}', 60, tags=['users_by_states']) 250 | @classmethod 251 | def get_users_by_state(cls, state): 252 | """ 253 | Caches user list by provided state parameter: there will be separate 254 | cached value for every different state parameter, so we are having 2 different 255 | cache keys: 256 | 257 | users_by_state:active – cached list of active users 258 | users_by_state:deleted – cached list of deleted users 259 | 260 | Note that `ecached` decorator always comes topmost. 261 | 262 | To invalidate concrete cached state call the following method 263 | with the required `state`, e.g.: 264 | >>> User.get_users_by_state.invalidate_cache_by_key('active') 265 | ... removes `users_by_state:active` cache key 266 | or 267 | >>> User.get_users_by_state.invalidate_cache_by_key(state='deleted') 268 | ... removes `users_by_state:deleted` cache key 269 | 270 | If you'd like to invalidate all caches for all states call: 271 | >>> User.get_users_by_state.invalidate_cache_by_tags('users_by_states') 272 | ... removes both keys, since `users_by_states` tag attached to all of them, 273 | 274 | `invalidate_cache_by_tags` supports both string and list parameter types: 275 | >>> invalidate_cache_by_tags(['tag1', 'tag2', 'tag3']) 276 | 277 | To refresh concrete cached state call the following method 278 | with required `state`, e.g: 279 | >>> User.get_users_by_state.refresh_cache('active') 280 | ... 
calls `get_users_by_state('active')` and saves returned value to cache 281 | or 282 | >>> User.get_users_by_state.refresh_cache(state='deleted') 283 | 284 | """ 285 | return list(cls.objects.filter(state=state)) 286 | 287 | @ecached_property('user_friends_count:{self.id}', timeout=3600) 288 | def friends_count(self): 289 | """ 290 | Caches friends count of each user for 1 hour. 291 | 292 | To access cache invalidation functions for a property you 293 | have to use class object instead of instance. 294 | 295 | Call the following method, to invalidate cache: 296 | >>> User.friends_count.invalidate_cache_by_key(user) 297 | ... removes cache key `user_friends_count:{user.id}` 298 | or 299 | >>> type(self).friends_count.invalidate_cache_by_key(user) 300 | or 301 | >>> self.__class__.friends_count.invalidate_cache_by_key(user) 302 | 303 | Where `user` is desired User instance to invalidate friends count for. 304 | 305 | Call the following method, to refresh cached data: 306 | >>> User.friends_count.refresh_cache(user) 307 | ... Updates `user.friends_count` in a cache. 308 | or 309 | >>> type(self).friends_count.refresh_cache(user) 310 | or 311 | >>> self.__class__.friends_count.refresh_cache(user) 312 | """ 313 | return self.friends.count() 314 | 315 | @staticmethod 316 | def get_books_tags(meta): 317 | """ 318 | Add one tag for every book in list of favorite books. 319 | So we will add a list of tags to cached favorite books list. 320 | """ 321 | if not meta.has_returned_value: 322 | return [] 323 | 324 | favorite_books = meta.returned_value 325 | # yes, it may occupy a lot of cache keys 326 | return [create_cache_key('book', book.pk) for book in favorite_books] 327 | 328 | @ecached('user_favorite_books:{self.id}', 600, get_books_tags) 329 | def get_favorite_books(self): 330 | """ 331 | Caches list of related books by user id. So in code you will use: 332 | 333 | >>> favorite_books = request.user.get_favorite_books() # cached for user 334 | 335 | You may want to invalidate this cache in two cases: 336 | 337 | 1. User added new book to favorites: 338 | 339 | >>> User.get_favorite_books.invalidate_cache_by_key(user) 340 | or 341 | >>> User.get_favorite_books.invalidate_cache_by_key(self=user) 342 | or 343 | >>> from easy_cache import invalidate_cache_key, create_cache_key 344 | >>> invalidate_cache_key(create_cache_key('user_favorite_books', user.id)) 345 | or 346 | >>> invalidate_cache_key('user_favorite_books:{}'.format(user.id)) 347 | 348 | 2. Some information about favorite book was changed, e.g. its title: 349 | >>> from easy_cache import invalidate_cache_tags, create_tag_cache_key 350 | >>> tag_cache_key = create_tag_cache_key('book', changed_book_id) 351 | >>> User.get_favorite_books.invalidate_cache_by_tags(tag_cache_key) 352 | or 353 | >>> invalidate_cache_tags(tag_cache_key) 354 | 355 | To refresh cached values use the following patterns: 356 | >>> User.get_favorite_books.refresh_cache(user) 357 | or 358 | >>> User.get_favorite_books.refresh_cache(self=user) 359 | """ 360 | return self.favorite_books.filter(user=self) 361 | ``` 362 | 363 | ## Prefix usage 364 | 365 | Commonly `prefix` is used to invalidate all cache-keys in one namespace, e. g.: 366 | 367 | ```python 368 | from functools import partial 369 | 370 | class Shop(models.Model): 371 | single_shop_cache = partial(ecached, prefix='shop:{self.id}') 372 | 373 | @single_shop_cache('goods_list') 374 | def get_all_goods_list(self): 375 | return [...] 
376 | 377 | @single_shop_cache('prices_list') 378 | def get_all_prices_list(self): 379 | return [...] 380 | 381 | # if you have `shop` object you are able to use the following invalidation 382 | # strategies: 383 | 384 | # Invalidate cached list of goods for concrete shop 385 | Shop.get_all_goods_list.invalidate_cache_by_key(shop) 386 | 387 | # Refresh cached list of goods for concrete shop 388 | Shop.get_all_goods_list.refresh_cache(shop) 389 | 390 | # Invalidate cached list of prices for concrete shop 391 | Shop.get_all_prices_list.invalidate_cache_by_key(shop) 392 | 393 | # Refresh cached list of prices for concrete shop 394 | Shop.get_all_prices_list.refresh_cache(shop) 395 | 396 | # Invalidate all cached items for concrete shop 397 | Shop.get_all_goods_list.invalidate_cache_by_prefix(shop) 398 | # or 399 | Shop.get_all_prices_list.invalidate_cache_by_prefix(shop) 400 | # or 401 | from easy_cache import invalidate_cache_prefix 402 | invalidate_cache_prefix('shop:{self.id}'.format(self=shop)) 403 | ``` 404 | 405 | ## Invalidation summary 406 | 407 | There are two ways to invalidate cache objects: use invalidation methods bound to decorated function and separate functions-invalidators. 408 | 409 | ```python 410 | .invalidate_cache_by_key(*args, **kwargs) 411 | .invalidate_cache_by_tags(tags=(), *args, **kwargs) 412 | .invalidate_cache_by_prefix(*args, **kwargs) 413 | 414 | # should be used with a class instance if it is used in a class namespace: 415 | class A: 416 | id = 1 417 | 418 | @ecached() 419 | def method(self): 420 | pass 421 | 422 | @ecached_property() 423 | def obj_property(self): 424 | pass 425 | 426 | @ecached_property('{self.id}:hello') 427 | def world(self): 428 | return '' 429 | 430 | A.method.invalidate_cache_by_key() 431 | # or 432 | A().method.invalidate_cache_by_key() 433 | # only one variant is possible for a properties 434 | A.obj_property.invalidate_cache_by_key() 435 | # and 436 | item = A() 437 | A.world.invalidate_cache_by_key(item) 438 | 439 | # and 440 | from easy_cache import ( 441 | invalidate_cache_key, 442 | invalidate_cache_tags, 443 | invalidate_cache_prefix, 444 | create_cache_key, 445 | ) 446 | 447 | # Note that `cache_instance` and `cache_alias` may be passed 448 | # to the following invalidators 449 | invalidate_cache_key(cache_key) 450 | invalidate_cache_tags(tags) 451 | invalidate_cache_prefix(prefix) 452 | ``` 453 | 454 | Here `tags` can be a string (single tag) or a list of tags. Bound methods should be provided with parameters if they are used in cache key/tag/prefix: 455 | 456 | ```python 457 | @ecached('key:{a}:value:{c}', tags=['tag:{a}'], prefix='pre:{b}', cache_alias='memcached') 458 | def time_consuming_operation(a, b, c=100): 459 | pass 460 | 461 | time_consuming_operation.invalidate_cache_by_key(a=1, c=11) 462 | time_consuming_operation.invalidate_cache_by_tags(a=10) 463 | time_consuming_operation.invalidate_cache_by_prefix(b=2) 464 | 465 | # or using `create_cache_key` helper 466 | invalidate_cache_key( 467 | create_cache_key('key', 1, 'value', 11), cache_alias='memcached' 468 | ) 469 | invalidate_cache_tags(create_cache_key('tag', 10), cache_alias='memcached') 470 | invalidate_cache_prefix('pre:{}'.format(2), cache_alias='memcached') 471 | ``` 472 | 473 | ## Refresh summary 474 | 475 | There is one way to refresh cache objects: use refresh methods bound to decorated function. 
476 | 477 | ```python 478 | .refresh_cache(*args, **kwargs) 479 | 480 | # should be used with a class instance if it is used in a class namespace: 481 | class A: 482 | @ecached() 483 | def method(self): 484 | pass 485 | 486 | @ecached_property() 487 | def obj_property(self): 488 | pass 489 | 490 | A.method.refresh_cache() 491 | A.obj_property.refresh_cache() 492 | ``` 493 | 494 | ## Internal caches framework 495 | 496 | Be aware: the internal caches framework stores cache instances per thread (it uses thread-local storage), so a cache instance added in one thread won't appear in another. 497 | 498 | Easy-cache uses the built-in Django cache framework by default, so you can choose which cache storage to use for every decorated function, e.g.: 499 | 500 | ```python 501 | # Django settings 502 | CACHES={ 503 | 'local_memory': { 504 | 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 505 | 'LOCATION': 'locmem', 506 | 'KEY_PREFIX': 'custom_prefix', 507 | }, 508 | 'memcached': { 509 | 'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache', 510 | 'LOCATION': '127.0.0.1:11211', 511 | 'KEY_PREFIX': 'memcached', 512 | } 513 | } 514 | 515 | # then somewhere in the code 516 | @ecached(..., cache_alias='memcached') 517 | # or 518 | @ecached(..., cache_alias='local_memory') 519 | # or even 520 | from django.core.cache import caches 521 | another_cache = caches['another_cache'] 522 | @ecached(..., cache_instance=another_cache) 523 | ``` 524 | 525 | However, if you don't use Django, there is a cache framework built into the easy-cache package; it can be used in the same fashion as Django caches: 526 | 527 | ```python 528 | # A custom cache instance class must implement the AbstractCacheInstance interface: 529 | from easy_cache.abc import AbstractCacheInstance 530 | from easy_cache.core import DEFAULT_TIMEOUT, NOT_FOUND 531 | 532 | class CustomCache(AbstractCacheInstance): 533 | 534 | def get(self, key, default=NOT_FOUND): 535 | ... 536 | 537 | def get_many(self, keys): 538 | ... 539 | 540 | def set(self, key, value, timeout=DEFAULT_TIMEOUT): 541 | ... 542 | 543 | def set_many(self, data_dict, timeout=DEFAULT_TIMEOUT): 544 | ... 545 | 546 | def delete(self, key): 547 | ... 548 | 549 | from easy_cache import caches 550 | 551 | custom_cache = CustomCache() 552 | caches['new_cache'] = custom_cache 553 | caches.set_default(CustomCache()) 554 | 555 | # and then 556 | @ecached(..., cache_alias='new_cache') 557 | # or 558 | @ecached(..., cache_instance=custom_cache) 559 | # will use the `default` alias 560 | @ecached(...) 561 | ``` 562 | 563 | There is an already-implemented Redis cache instance class, based on the [redis-py client](https://pypi.python.org/pypi/redis): 564 | 565 | ```python 566 | from redis import StrictRedis 567 | from easy_cache.contrib.redis_cache import RedisCacheInstance 568 | from easy_cache import caches 569 | 570 | redis_cache = RedisCacheInstance(StrictRedis(host='...', port='...')) 571 | caches.set_default(redis_cache) 572 | 573 | # will use the `default` alias 574 | @ecached(...) 575 | ``` 576 | 577 | ## Dynamic timeout example 578 | 579 | You may need to provide the cache timeout dynamically depending on function parameters: 580 | 581 | ```python 582 | def dynamic_timeout(group): 583 | if group == 'admins': 584 | timeout = 10 585 | else: 586 | timeout = 100 587 | return timeout 588 | 589 | @ecached('key:{group}', timeout=dynamic_timeout) 590 | def get_users_by_group(group): 591 | ...
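# note: as written in this sketch, the timeout callable takes the same
# parameters as the decorated function, so calls for the 'admins' group
# are cached for 10 seconds while any other group is cached for 100 seconds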
592 | ``` 593 | 594 | ## Development and contribution 595 | 596 | Live instances of Redis and Memcached are required for a few tests to pass, so it's recommended to use docker/docker-compose to set up the necessary environment: 597 | 598 | ```shell 599 | docker-compose up -d 600 | 601 | # to enable debug logs 602 | # export EASY_CACHE_DEBUG="yes" 603 | 604 | # install package locally 605 | pip install -e .[tests] 606 | 607 | # run tests with pytest or tox 608 | pytest 609 | tox 610 | ``` 611 | 612 | ## Performance and overhead 613 | 614 | Benchmarking may be executed with the `tox` command; it shows that the decorators add about 4% overhead in the worst case and about 1-2% overhead on average. 615 | 616 | If you don't use tags or a prefix, you will get one cache request for `get`, plus one request for `set` if the result is not found in the cache. Otherwise two consecutive requests will be made: `get` and `get_many`, to receive the actual value from the cache and validate its tags (prefix); then one `set_many` request is performed to save the data to the cache storage. 617 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | redis: 4 | image: redis 5 | ports: 6 | - "6379:6379" 7 | memcached: 8 | image: memcached 9 | ports: 10 | - "11211:11211" -------------------------------------------------------------------------------- /easy_cache/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from ._version import get_versions 3 | __version__ = get_versions()['version'] 4 | del get_versions 5 | 6 | from easy_cache.core import ( 7 | caches, 8 | create_cache_key, 9 | create_tag_cache_key, 10 | invalidate_cache_key, 11 | invalidate_cache_prefix, 12 | invalidate_cache_tags, 13 | get_default_cache_instance, 14 | set_global_cache_instance, 15 | set_cache_key_delimiter, 16 | set_tag_key_prefix, 17 | MetaCallable, 18 | ) 19 | 20 | from easy_cache.decorators import ecached, ecached_property, meta_accepted 21 | -------------------------------------------------------------------------------- /easy_cache/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. Generated by 9 | # versioneer-0.17 (https://github.com/warner/python-versioneer) 10 | 11 | """Git implementation of _version.py.""" 12 | 13 | import errno 14 | import os 15 | import re 16 | import subprocess 17 | import sys 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords().
26 | git_refnames = " (HEAD -> master)" 27 | git_full = "e36c5432b13453120b71adb3e7392af35d2af5ea" 28 | git_date = "2022-10-13 23:03:58 +0300" 29 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 30 | return keywords 31 | 32 | 33 | class VersioneerConfig: 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440" 44 | cfg.tag_prefix = "" 45 | cfg.parentdir_prefix = "None" 46 | cfg.versionfile_source = "easy_cache/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | """Exception raised if a method is not valid for the current scenario.""" 53 | 54 | 55 | LONG_VERSION_PY = {} 56 | HANDLERS = {} 57 | 58 | 59 | def register_vcs_handler(vcs, method): # decorator 60 | """Decorator to mark a method as the handler for a particular VCS.""" 61 | def decorate(f): 62 | """Store f in HANDLERS[vcs][method].""" 63 | if vcs not in HANDLERS: 64 | HANDLERS[vcs] = {} 65 | HANDLERS[vcs][method] = f 66 | return f 67 | return decorate 68 | 69 | 70 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 71 | env=None): 72 | """Call the given command(s).""" 73 | assert isinstance(commands, list) 74 | p = None 75 | for c in commands: 76 | try: 77 | dispcmd = str([c] + args) 78 | # remember shell=False, so use git.cmd on windows, not just git 79 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 80 | stdout=subprocess.PIPE, 81 | stderr=(subprocess.PIPE if hide_stderr 82 | else None)) 83 | break 84 | except EnvironmentError: 85 | e = sys.exc_info()[1] 86 | if e.errno == errno.ENOENT: 87 | continue 88 | if verbose: 89 | print("unable to run %s" % dispcmd) 90 | print(e) 91 | return None, None 92 | else: 93 | if verbose: 94 | print("unable to find command, tried %s" % (commands,)) 95 | return None, None 96 | stdout = p.communicate()[0].strip() 97 | if sys.version_info[0] >= 3: 98 | stdout = stdout.decode() 99 | if p.returncode != 0: 100 | if verbose: 101 | print("unable to run %s (error)" % dispcmd) 102 | print("stdout was %s" % stdout) 103 | return None, p.returncode 104 | return stdout, p.returncode 105 | 106 | 107 | def versions_from_parentdir(parentdir_prefix, root, verbose): 108 | """Try to determine the version from the parent directory name. 109 | 110 | Source tarballs conventionally unpack into a directory that includes both 111 | the project name and a version string. 
We will also support searching up 112 | two directory levels for an appropriately named parent directory 113 | """ 114 | rootdirs = [] 115 | 116 | for i in range(3): 117 | dirname = os.path.basename(root) 118 | if dirname.startswith(parentdir_prefix): 119 | return {"version": dirname[len(parentdir_prefix):], 120 | "full-revisionid": None, 121 | "dirty": False, "error": None, "date": None} 122 | else: 123 | rootdirs.append(root) 124 | root = os.path.dirname(root) # up a level 125 | 126 | if verbose: 127 | print("Tried directories %s but none started with prefix %s" % 128 | (str(rootdirs), parentdir_prefix)) 129 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 130 | 131 | 132 | @register_vcs_handler("git", "get_keywords") 133 | def git_get_keywords(versionfile_abs): 134 | """Extract version information from the given file.""" 135 | # the code embedded in _version.py can just fetch the value of these 136 | # keywords. When used from setup.py, we don't want to import _version.py, 137 | # so we do it with a regexp instead. This function is not used from 138 | # _version.py. 139 | keywords = {} 140 | try: 141 | f = open(versionfile_abs, "r") 142 | for line in f.readlines(): 143 | if line.strip().startswith("git_refnames ="): 144 | mo = re.search(r'=\s*"(.*)"', line) 145 | if mo: 146 | keywords["refnames"] = mo.group(1) 147 | if line.strip().startswith("git_full ="): 148 | mo = re.search(r'=\s*"(.*)"', line) 149 | if mo: 150 | keywords["full"] = mo.group(1) 151 | if line.strip().startswith("git_date ="): 152 | mo = re.search(r'=\s*"(.*)"', line) 153 | if mo: 154 | keywords["date"] = mo.group(1) 155 | f.close() 156 | except EnvironmentError: 157 | pass 158 | return keywords 159 | 160 | 161 | @register_vcs_handler("git", "keywords") 162 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 163 | """Get version information from git keywords.""" 164 | if not keywords: 165 | raise NotThisMethod("no keywords at all, weird") 166 | date = keywords.get("date") 167 | if date is not None: 168 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 169 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 170 | # -like" string, which we must then edit to make compliant), because 171 | # it's been around since git-1.5.3, and it's too difficult to 172 | # discover which version we're using, or to work around using an 173 | # older one. 174 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 175 | refnames = keywords["refnames"].strip() 176 | if refnames.startswith("$Format"): 177 | if verbose: 178 | print("keywords are unexpanded, not using") 179 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 180 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 181 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 182 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 183 | TAG = "tag: " 184 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 185 | if not tags: 186 | # Either we're using git < 1.8.3, or there really are no tags. We use 187 | # a heuristic: assume all version tags have a digit. The old git %d 188 | # expansion behaves like git log --decorate=short and strips out the 189 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 190 | # between branches and tags. By ignoring refnames without digits, we 191 | # filter out many common branch names like "release" and 192 | # "stabilization", as well as "HEAD" and "master". 
193 | tags = set([r for r in refs if re.search(r'\d', r)]) 194 | if verbose: 195 | print("discarding '%s', no digits" % ",".join(refs - tags)) 196 | if verbose: 197 | print("likely tags: %s" % ",".join(sorted(tags))) 198 | for ref in sorted(tags): 199 | # sorting will prefer e.g. "2.0" over "2.0rc1" 200 | if ref.startswith(tag_prefix): 201 | r = ref[len(tag_prefix):] 202 | if verbose: 203 | print("picking %s" % r) 204 | return {"version": r, 205 | "full-revisionid": keywords["full"].strip(), 206 | "dirty": False, "error": None, 207 | "date": date} 208 | # no suitable tags, so version is "0+unknown", but full hex is still there 209 | if verbose: 210 | print("no suitable tags, using unknown + full revision id") 211 | return {"version": "0+unknown", 212 | "full-revisionid": keywords["full"].strip(), 213 | "dirty": False, "error": "no suitable tags", "date": None} 214 | 215 | 216 | @register_vcs_handler("git", "pieces_from_vcs") 217 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 218 | """Get version from 'git describe' in the root of the source tree. 219 | 220 | This only gets called if the git-archive 'subst' keywords were *not* 221 | expanded, and _version.py hasn't already been rewritten with a short 222 | version string, meaning we're inside a checked out source tree. 223 | """ 224 | GITS = ["git"] 225 | if sys.platform == "win32": 226 | GITS = ["git.cmd", "git.exe"] 227 | 228 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 229 | hide_stderr=True) 230 | if rc != 0: 231 | if verbose: 232 | print("Directory %s not under git control" % root) 233 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 234 | 235 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 236 | # if there isn't one, this yields HEX[-dirty] (no NUM) 237 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 238 | "--always", "--long", 239 | "--match", "%s*" % tag_prefix], 240 | cwd=root) 241 | # --long was added in git-1.5.5 242 | if describe_out is None: 243 | raise NotThisMethod("'git describe' failed") 244 | describe_out = describe_out.strip() 245 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 246 | if full_out is None: 247 | raise NotThisMethod("'git rev-parse' failed") 248 | full_out = full_out.strip() 249 | 250 | pieces = {} 251 | pieces["long"] = full_out 252 | pieces["short"] = full_out[:7] # maybe improved later 253 | pieces["error"] = None 254 | 255 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 256 | # TAG might have hyphens. 257 | git_describe = describe_out 258 | 259 | # look for -dirty suffix 260 | dirty = git_describe.endswith("-dirty") 261 | pieces["dirty"] = dirty 262 | if dirty: 263 | git_describe = git_describe[:git_describe.rindex("-dirty")] 264 | 265 | # now we have TAG-NUM-gHEX or HEX 266 | 267 | if "-" in git_describe: 268 | # TAG-NUM-gHEX 269 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 270 | if not mo: 271 | # unparseable. Maybe git-describe is misbehaving? 
272 | pieces["error"] = ("unable to parse git-describe output: '%s'" 273 | % describe_out) 274 | return pieces 275 | 276 | # tag 277 | full_tag = mo.group(1) 278 | if not full_tag.startswith(tag_prefix): 279 | if verbose: 280 | fmt = "tag '%s' doesn't start with prefix '%s'" 281 | print(fmt % (full_tag, tag_prefix)) 282 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 283 | % (full_tag, tag_prefix)) 284 | return pieces 285 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 286 | 287 | # distance: number of commits since tag 288 | pieces["distance"] = int(mo.group(2)) 289 | 290 | # commit: short hex revision ID 291 | pieces["short"] = mo.group(3) 292 | 293 | else: 294 | # HEX: no tags 295 | pieces["closest-tag"] = None 296 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 297 | cwd=root) 298 | pieces["distance"] = int(count_out) # total number of commits 299 | 300 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 301 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], 302 | cwd=root)[0].strip() 303 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 304 | 305 | return pieces 306 | 307 | 308 | def plus_or_dot(pieces): 309 | """Return a + if we don't already have one, else return a .""" 310 | if "+" in pieces.get("closest-tag", ""): 311 | return "." 312 | return "+" 313 | 314 | 315 | def render_pep440(pieces): 316 | """Build up version string, with post-release "local version identifier". 317 | 318 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 319 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 320 | 321 | Exceptions: 322 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 323 | """ 324 | if pieces["closest-tag"]: 325 | rendered = pieces["closest-tag"] 326 | if pieces["distance"] or pieces["dirty"]: 327 | rendered += plus_or_dot(pieces) 328 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 329 | if pieces["dirty"]: 330 | rendered += ".dirty" 331 | else: 332 | # exception #1 333 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 334 | pieces["short"]) 335 | if pieces["dirty"]: 336 | rendered += ".dirty" 337 | return rendered 338 | 339 | 340 | def render_pep440_pre(pieces): 341 | """TAG[.post.devDISTANCE] -- No -dirty. 342 | 343 | Exceptions: 344 | 1: no tags. 0.post.devDISTANCE 345 | """ 346 | if pieces["closest-tag"]: 347 | rendered = pieces["closest-tag"] 348 | if pieces["distance"]: 349 | rendered += ".post.dev%d" % pieces["distance"] 350 | else: 351 | # exception #1 352 | rendered = "0.post.dev%d" % pieces["distance"] 353 | return rendered 354 | 355 | 356 | def render_pep440_post(pieces): 357 | """TAG[.postDISTANCE[.dev0]+gHEX] . 358 | 359 | The ".dev0" means dirty. Note that .dev0 sorts backwards 360 | (a dirty tree will appear "older" than the corresponding clean one), 361 | but you shouldn't be releasing software with -dirty anyways. 362 | 363 | Exceptions: 364 | 1: no tags. 
0.postDISTANCE[.dev0] 365 | """ 366 | if pieces["closest-tag"]: 367 | rendered = pieces["closest-tag"] 368 | if pieces["distance"] or pieces["dirty"]: 369 | rendered += ".post%d" % pieces["distance"] 370 | if pieces["dirty"]: 371 | rendered += ".dev0" 372 | rendered += plus_or_dot(pieces) 373 | rendered += "g%s" % pieces["short"] 374 | else: 375 | # exception #1 376 | rendered = "0.post%d" % pieces["distance"] 377 | if pieces["dirty"]: 378 | rendered += ".dev0" 379 | rendered += "+g%s" % pieces["short"] 380 | return rendered 381 | 382 | 383 | def render_pep440_old(pieces): 384 | """TAG[.postDISTANCE[.dev0]] . 385 | 386 | The ".dev0" means dirty. 387 | 388 | Eexceptions: 389 | 1: no tags. 0.postDISTANCE[.dev0] 390 | """ 391 | if pieces["closest-tag"]: 392 | rendered = pieces["closest-tag"] 393 | if pieces["distance"] or pieces["dirty"]: 394 | rendered += ".post%d" % pieces["distance"] 395 | if pieces["dirty"]: 396 | rendered += ".dev0" 397 | else: 398 | # exception #1 399 | rendered = "0.post%d" % pieces["distance"] 400 | if pieces["dirty"]: 401 | rendered += ".dev0" 402 | return rendered 403 | 404 | 405 | def render_git_describe(pieces): 406 | """TAG[-DISTANCE-gHEX][-dirty]. 407 | 408 | Like 'git describe --tags --dirty --always'. 409 | 410 | Exceptions: 411 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 412 | """ 413 | if pieces["closest-tag"]: 414 | rendered = pieces["closest-tag"] 415 | if pieces["distance"]: 416 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 417 | else: 418 | # exception #1 419 | rendered = pieces["short"] 420 | if pieces["dirty"]: 421 | rendered += "-dirty" 422 | return rendered 423 | 424 | 425 | def render_git_describe_long(pieces): 426 | """TAG-DISTANCE-gHEX[-dirty]. 427 | 428 | Like 'git describe --tags --dirty --always -long'. 429 | The distance/hash is unconditional. 430 | 431 | Exceptions: 432 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 433 | """ 434 | if pieces["closest-tag"]: 435 | rendered = pieces["closest-tag"] 436 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 437 | else: 438 | # exception #1 439 | rendered = pieces["short"] 440 | if pieces["dirty"]: 441 | rendered += "-dirty" 442 | return rendered 443 | 444 | 445 | def render(pieces, style): 446 | """Render the given version pieces into the requested style.""" 447 | if pieces["error"]: 448 | return {"version": "unknown", 449 | "full-revisionid": pieces.get("long"), 450 | "dirty": None, 451 | "error": pieces["error"], 452 | "date": None} 453 | 454 | if not style or style == "default": 455 | style = "pep440" # the default 456 | 457 | if style == "pep440": 458 | rendered = render_pep440(pieces) 459 | elif style == "pep440-pre": 460 | rendered = render_pep440_pre(pieces) 461 | elif style == "pep440-post": 462 | rendered = render_pep440_post(pieces) 463 | elif style == "pep440-old": 464 | rendered = render_pep440_old(pieces) 465 | elif style == "git-describe": 466 | rendered = render_git_describe(pieces) 467 | elif style == "git-describe-long": 468 | rendered = render_git_describe_long(pieces) 469 | else: 470 | raise ValueError("unknown style '%s'" % style) 471 | 472 | return {"version": rendered, "full-revisionid": pieces["long"], 473 | "dirty": pieces["dirty"], "error": None, 474 | "date": pieces.get("date")} 475 | 476 | 477 | def get_versions(): 478 | """Get version information or return default if unable to do so.""" 479 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 480 | # __file__, we can work backwards from there to the root. 
Some 481 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 482 | # case we can only use expanded keywords. 483 | 484 | cfg = get_config() 485 | verbose = cfg.verbose 486 | 487 | try: 488 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 489 | verbose) 490 | except NotThisMethod: 491 | pass 492 | 493 | try: 494 | root = os.path.realpath(__file__) 495 | # versionfile_source is the relative path from the top of the source 496 | # tree (where the .git directory might live) to this file. Invert 497 | # this to find the root from __file__. 498 | for i in cfg.versionfile_source.split('/'): 499 | root = os.path.dirname(root) 500 | except NameError: 501 | return {"version": "0+unknown", "full-revisionid": None, 502 | "dirty": None, 503 | "error": "unable to find root of source tree", 504 | "date": None} 505 | 506 | try: 507 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 508 | return render(pieces, cfg.style) 509 | except NotThisMethod: 510 | pass 511 | 512 | try: 513 | if cfg.parentdir_prefix: 514 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 515 | except NotThisMethod: 516 | pass 517 | 518 | return {"version": "0+unknown", "full-revisionid": None, 519 | "dirty": None, 520 | "error": "unable to compute version", "date": None} 521 | -------------------------------------------------------------------------------- /easy_cache/abc.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from abc import ABCMeta, abstractmethod 3 | 4 | from easy_cache.core import DEFAULT_TIMEOUT, NOT_FOUND 5 | 6 | 7 | class AbstractCacheInstance(object, metaclass=ABCMeta): 8 | """All custom cache instances (clients) should 9 | inherit this class. 
10 | """ 11 | 12 | @abstractmethod 13 | def get(self, key, default=NOT_FOUND): 14 | """ 15 | :type key: str | basestring 16 | :rtype Any | None 17 | """ 18 | pass 19 | 20 | @abstractmethod 21 | def get_many(self, keys): 22 | """ 23 | :type keys: list | tuple 24 | :rtype dict: 25 | """ 26 | pass 27 | 28 | @abstractmethod 29 | def set(self, key, value, timeout=DEFAULT_TIMEOUT): 30 | """ 31 | :type key: str | basestring 32 | """ 33 | pass 34 | 35 | @abstractmethod 36 | def set_many(self, data_dict, timeout=DEFAULT_TIMEOUT): 37 | """ 38 | :type data_dict: dict 39 | """ 40 | pass 41 | 42 | @abstractmethod 43 | def delete(self, key): 44 | """ 45 | :type key: str | basestring 46 | """ 47 | pass 48 | -------------------------------------------------------------------------------- /easy_cache/compat.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from collections import namedtuple 3 | import inspect 4 | from inspect import Parameter 5 | 6 | 7 | def force_text(obj, encoding='utf-8'): 8 | if isinstance(obj, str): 9 | return obj 10 | elif not isinstance(obj, bytes): 11 | return str(obj) 12 | 13 | try: 14 | return str(obj, encoding=encoding) 15 | except UnicodeDecodeError: 16 | return obj.decode(encoding) 17 | 18 | 19 | def force_binary(obj, encoding='utf-8'): 20 | if isinstance(obj, bytes): 21 | return obj 22 | elif not isinstance(obj, str): 23 | return bytes(obj) 24 | 25 | try: 26 | return bytes(obj, encoding=encoding) 27 | except UnicodeEncodeError: 28 | return obj.encode(encoding) 29 | 30 | 31 | ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults') 32 | 33 | 34 | def getargspec(func): 35 | signature = inspect.signature(func) 36 | 37 | args = [] 38 | varargs = None 39 | keywords = None 40 | defaults = [] 41 | 42 | for param in signature.parameters.values(): # type: Parameter 43 | if param.kind == Parameter.VAR_POSITIONAL: 44 | varargs = param.name 45 | elif param.kind in ( 46 | Parameter.POSITIONAL_ONLY, 47 | Parameter.KEYWORD_ONLY, 48 | Parameter.POSITIONAL_OR_KEYWORD): 49 | args.append(param.name) 50 | elif param.kind == Parameter.VAR_KEYWORD: 51 | keywords = param.name 52 | 53 | # noinspection PyProtectedMember 54 | if param.default is not inspect._empty: 55 | defaults.append(param.default) 56 | 57 | return ArgSpec(args, varargs, keywords, tuple(defaults)) 58 | -------------------------------------------------------------------------------- /easy_cache/contrib/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | -------------------------------------------------------------------------------- /easy_cache/contrib/redis_cache.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | from easy_cache import create_cache_key 4 | from easy_cache.abc import AbstractCacheInstance 5 | from easy_cache.compat import force_text, force_binary 6 | from easy_cache.core import DEFAULT_TIMEOUT, NOT_FOUND 7 | 8 | 9 | class RedisCacheInstance(AbstractCacheInstance): 10 | """Redis cache instance compatible with easy_cache. 11 | 12 | Instance of Redis or StrictRedis instance must be passed to init. 
13 | See: https://pypi.python.org/pypi/redis 14 | """ 15 | def __init__(self, redis, prefix=None, serializer=json): 16 | self.client = redis 17 | self.prefix = prefix 18 | self.serializer = serializer 19 | 20 | def make_key(self, key): 21 | if not self.prefix: 22 | return key 23 | return create_cache_key(self.prefix, key) 24 | 25 | def load_value(self, value): 26 | if isinstance(value, bytes): 27 | value = force_text(value) 28 | elif value is None: 29 | return value 30 | return self.serializer.loads(value) 31 | 32 | # noinspection PyMethodMayBeStatic 33 | def dump_value(self, value): 34 | if isinstance(value, bytes): 35 | return value 36 | 37 | return force_binary(self.serializer.dumps(value)) 38 | 39 | def make_keys(self, keys): 40 | return [self.make_key(key) for key in keys] 41 | 42 | def get_many(self, keys): 43 | """ 44 | :rtype dict: 45 | """ 46 | return dict( 47 | zip( 48 | keys, 49 | map(self.load_value, self.client.mget(self.make_keys(keys))) 50 | ) 51 | ) 52 | 53 | def set(self, key, value, timeout=DEFAULT_TIMEOUT): 54 | """ 55 | :param timeout: must be in seconds 56 | """ 57 | if timeout is DEFAULT_TIMEOUT: 58 | timeout = None 59 | 60 | return self.client.set( 61 | self.make_key(key), 62 | self.dump_value(value), 63 | ex=timeout 64 | ) 65 | 66 | def set_many(self, data_dict, timeout=DEFAULT_TIMEOUT): 67 | """ 68 | :param timeout: must be in seconds 69 | """ 70 | if timeout is DEFAULT_TIMEOUT: 71 | timeout = None 72 | 73 | pipe = self.client.pipeline() 74 | pipe.mset( 75 | {self.make_key(key): self.dump_value(value) 76 | for key, value in iter(data_dict.items())} 77 | ) 78 | 79 | if timeout: 80 | for key in data_dict: 81 | pipe.expire(self.make_key(key), timeout) 82 | 83 | return pipe.execute() 84 | 85 | def delete(self, key): 86 | return self.client.delete(self.make_key(key)) 87 | 88 | def get(self, key, default=NOT_FOUND): 89 | result = self.client.get(self.make_key(key)) 90 | return default if result is None else self.load_value(result) 91 | -------------------------------------------------------------------------------- /easy_cache/core.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from collections import abc 5 | import inspect 6 | import logging 7 | import os 8 | import threading 9 | from time import time 10 | 11 | import six 12 | 13 | from .compat import force_text, force_binary, getargspec 14 | from .utils import get_function_path, cached_property 15 | 16 | 17 | try: 18 | # noinspection PyUnresolvedReferences 19 | import django 20 | 21 | # noinspection PyUnresolvedReferences 22 | def _get_cache_by_alias(alias): 23 | if alias == DEFAULT_CACHE_ALIAS: 24 | from django.core.cache import cache 25 | else: 26 | try: 27 | from django.core.cache import caches 28 | cache = caches[alias] 29 | except ImportError: 30 | from django.core.cache import get_cache 31 | cache = get_cache(alias) 32 | return cache 33 | 34 | except ImportError: 35 | 36 | class ImproperlyConfigured(Exception): 37 | pass 38 | 39 | def _get_cache_by_alias(alias): 40 | raise ImproperlyConfigured('Cache instance not found for alias "%s"' % alias) 41 | 42 | 43 | logger = logging.getLogger(__name__) 44 | 45 | 46 | class Value(object): 47 | 48 | def __init__(self, name): 49 | self.name = name 50 | 51 | def __repr__(self): 52 | return self.name 53 | 54 | 55 | NOT_FOUND = Value('NOT_FOUND') 56 | NOT_SET = Value('NOT_SET') 57 | DEFAULT_TIMEOUT = Value('DEFAULT_TIMEOUT') 58 | CACHE_KEY_DELIMITER = 
force_text(':') 59 | TAG_KEY_PREFIX = force_text('tag') 60 | 61 | LAZY_MODE = os.environ.get('EASY_CACHE_LAZY_MODE_ENABLE', '') == 'yes' 62 | DEFAULT_CACHE_ALIAS = 'default-easy-cache' 63 | META_ACCEPTED_ATTR = '_easy_cache_meta_accepted' 64 | META_ARG_NAME = 'meta' 65 | 66 | 67 | class CacheHandler(object): 68 | """ Inspired by Django """ 69 | 70 | def __init__(self): 71 | self._caches = threading.local() 72 | 73 | # noinspection PyMethodMayBeStatic 74 | def get_default_cache(self, alias): 75 | return _get_cache_by_alias(alias) 76 | 77 | def __getitem__(self, alias): 78 | try: 79 | return self._caches.caches[alias] 80 | except AttributeError: 81 | self._caches.caches = {} 82 | except KeyError: 83 | pass 84 | 85 | cache = self.get_default_cache(alias) 86 | self._caches.caches[alias] = cache 87 | return cache 88 | 89 | def __setitem__(self, key, value): 90 | try: 91 | self._caches.caches 92 | except AttributeError: 93 | self._caches.caches = {} 94 | 95 | self._caches.caches[key] = value 96 | 97 | def get_default(self): 98 | return self[DEFAULT_CACHE_ALIAS] 99 | 100 | def set_default(self, cache_instance): 101 | self[DEFAULT_CACHE_ALIAS] = cache_instance 102 | 103 | 104 | caches = CacheHandler() 105 | 106 | 107 | # setters 108 | def set_cache_key_delimiter(delimiter): 109 | if not isinstance(delimiter, str): 110 | raise TypeError('Invalid delimiter type, string required') 111 | 112 | global CACHE_KEY_DELIMITER 113 | CACHE_KEY_DELIMITER = force_text(delimiter) 114 | 115 | 116 | def set_tag_key_prefix(prefix): 117 | if not isinstance(prefix, str): 118 | raise TypeError('Invalid tag prefix type, string required') 119 | 120 | global TAG_KEY_PREFIX 121 | TAG_KEY_PREFIX = force_text(prefix) 122 | 123 | 124 | def set_global_cache_instance(cache_instance): 125 | caches.set_default(cache_instance) 126 | 127 | 128 | def get_default_cache_instance(): 129 | return caches.get_default() 130 | 131 | 132 | def invalidate_cache_key(cache_key, cache_instance=None, cache_alias=None): 133 | _cache = cache_instance or caches[cache_alias or DEFAULT_CACHE_ALIAS] 134 | return _cache.delete(cache_key) 135 | 136 | 137 | def invalidate_cache_prefix(prefix, cache_instance=None, cache_alias=None): 138 | return invalidate_cache_tags(prefix, cache_instance, cache_alias) 139 | 140 | 141 | def invalidate_cache_tags(tags, cache_instance=None, cache_alias=None): 142 | if isinstance(tags, str): 143 | tags = [tags] 144 | 145 | _cache = TaggedCacheProxy(cache_instance or caches[cache_alias or DEFAULT_CACHE_ALIAS]) 146 | return _cache.invalidate(tags) 147 | 148 | 149 | def create_cache_key(*parts): 150 | """ Generate cache key using global delimiter char """ 151 | if len(parts) == 1: 152 | parts = parts[0] 153 | if isinstance(parts, str): 154 | parts = [parts] 155 | 156 | return CACHE_KEY_DELIMITER.join(force_text(p) for p in parts) 157 | 158 | 159 | def create_tag_cache_key(*parts): 160 | return create_cache_key(TAG_KEY_PREFIX, *parts) 161 | 162 | 163 | def get_timestamp(): 164 | return int(time() * 1000000) 165 | 166 | 167 | def compare_dicts(d1, d2): 168 | """Use simple comparison""" 169 | return dict(d1) == dict(d2) 170 | 171 | 172 | class MetaCallable(abc.Mapping): 173 | """ Object contains meta information about method or function decorated with ecached, 174 | passed arguments, returned results, signature description and so on. 
175 | """ 176 | 177 | def __init__(self, args=(), kwargs=None, returned_value=NOT_SET, call_args=None): 178 | self.args = args 179 | self.kwargs = kwargs or {} 180 | self.returned_value = returned_value 181 | self.call_args = call_args or {} 182 | self.function = None 183 | self.scope = None 184 | 185 | def __contains__(self, item): 186 | return item in self.call_args 187 | 188 | def __iter__(self): 189 | return iter(self.call_args) 190 | 191 | def __len__(self): 192 | return len(self.call_args) 193 | 194 | def __getitem__(self, item): 195 | return self.call_args[item] 196 | 197 | @property 198 | def has_returned_value(self): 199 | return self.returned_value is not NOT_SET 200 | 201 | 202 | class TaggedCacheProxy(object): 203 | """ Each cache key/value pair can have additional tags to check 204 | if cached values is still valid. 205 | """ 206 | def __init__(self, cache_instance): 207 | """ 208 | :param cache_instance: should support `set_many` and 209 | `get_many` operations 210 | """ 211 | self._cache_instance = cache_instance 212 | 213 | def make_value(self, key, value, tags): 214 | data = {} 215 | tags = [create_tag_cache_key(_) for _ in tags] 216 | 217 | # get tags and their cached values (if exists) 218 | tags_dict = self._cache_instance.get_many(tags) 219 | 220 | # set new timestamps for missed tags 221 | for tag_key in tags: 222 | if tags_dict.get(tag_key) is None: 223 | # this should be sent to cache as separate key-value 224 | data[tag_key] = get_timestamp() 225 | 226 | tags_dict.update(data) 227 | 228 | data[key] = { 229 | 'value': value, 230 | 'tags': tags_dict, 231 | } 232 | 233 | return data 234 | 235 | def __getattr__(self, item): 236 | return getattr(self._cache_instance, item) 237 | 238 | def set(self, key, value, *args, **kwargs): 239 | value_dict = self.make_value(key, value, kwargs.pop('tags')) 240 | return self._cache_instance.set_many(value_dict, *args, **kwargs) 241 | 242 | def get(self, key, default=None, **kwargs): 243 | value = self._cache_instance.get(key, default=NOT_FOUND, **kwargs) 244 | 245 | # not found in cache 246 | if value is NOT_FOUND: 247 | return default 248 | 249 | tags_dict = value.get('tags') 250 | if not tags_dict: 251 | return value 252 | 253 | # check if it has valid tags 254 | cached_tags_dict = self._cache_instance.get_many(tags_dict.keys()) 255 | 256 | # compare dicts 257 | if not compare_dicts(cached_tags_dict, tags_dict): 258 | # cache is invalid - return default value 259 | return default 260 | 261 | return value.get('value', default) 262 | 263 | def invalidate(self, tags): 264 | """ Invalidates cache by tags """ 265 | ts = get_timestamp() 266 | return self._cache_instance.set_many({create_tag_cache_key(tag): ts for tag in tags}) 267 | 268 | 269 | class Cached(object): 270 | 271 | def __init__(self, 272 | function, 273 | cache_key=None, 274 | timeout=DEFAULT_TIMEOUT, 275 | cache_instance=None, 276 | cache_alias=None, 277 | as_property=False): 278 | 279 | self.cache_key = cache_key 280 | self.function = function 281 | self.as_property = as_property 282 | self.timeout = timeout 283 | self.instance = None 284 | self.klass = None 285 | 286 | self._scope = None 287 | self._cache_instance = cache_instance 288 | self._cache_alias = cache_alias or DEFAULT_CACHE_ALIAS 289 | 290 | @cached_property 291 | def cache_key_template(self): 292 | # processing different types of cache_key parameter 293 | if self.cache_key is None: 294 | return self.create_cache_key 295 | elif isinstance(self.cache_key, (list, tuple)): 296 | return create_cache_key( 297 | 
force_text(key).join(('{', '}')) for key in self.cache_key 298 | ) 299 | else: 300 | return self.cache_key 301 | 302 | @property 303 | def scope(self): 304 | return self.instance or self.klass or self._scope 305 | 306 | @scope.setter 307 | def scope(self, value): 308 | self._scope = value 309 | 310 | def get_timeout(self, callable_meta): 311 | if isinstance(self.timeout, int) or self.timeout is DEFAULT_TIMEOUT: 312 | return self.timeout 313 | 314 | return self._format(self.timeout, callable_meta) 315 | 316 | if LAZY_MODE: 317 | def _get_cache_instance(self): 318 | if self._cache_instance is None: 319 | return caches[self._cache_alias] 320 | return self._cache_instance 321 | else: 322 | def _get_cache_instance(self): 323 | if self._cache_instance is None: 324 | self._cache_instance = caches[self._cache_alias] 325 | return self._cache_instance 326 | 327 | cache_instance = property(_get_cache_instance) 328 | 329 | def __call__(self, *args, **kwargs): 330 | callable_meta = self.collect_meta(args, kwargs) 331 | cache_key = self.generate_cache_key(callable_meta) 332 | cached_value = self.get_cached_value(cache_key) 333 | 334 | if cached_value is NOT_FOUND: 335 | logger.debug('MISS cache_key="%s"', cache_key) 336 | value = self.function(*callable_meta.args, **callable_meta.kwargs) 337 | callable_meta.returned_value = value 338 | self.set_cached_value(cache_key, callable_meta) 339 | return value 340 | 341 | logger.debug('HIT cache_key="%s"', cache_key) 342 | return cached_value 343 | 344 | def create_cache_key(self, *args, **kwargs): 345 | """ if cache_key parameter is not specified we use default algorithm """ 346 | scope = self.scope 347 | prefix = get_function_path(self.function, scope) 348 | 349 | args = list(args) 350 | if scope: 351 | try: 352 | args.remove(scope) 353 | except ValueError: 354 | pass 355 | 356 | for k in sorted(kwargs): 357 | args.append(kwargs[k]) 358 | return create_cache_key(prefix, *args) 359 | 360 | def update_arguments(self, args, kwargs): 361 | # if we got instance method or class method - modify positional arguments 362 | if self.instance: 363 | # first argument in args is "self" 364 | args = (self.instance, ) + args 365 | elif self.klass and not type(self.function) == staticmethod: 366 | # firs argument in args is "cls" 367 | args = (self.klass, ) + args 368 | 369 | return args, kwargs 370 | 371 | def _clone(self, **kwargs): 372 | cached = self.__class__(function=self.function, **kwargs) 373 | 374 | cached.cache_key = self.cache_key 375 | cached.as_property = self.as_property 376 | cached.timeout = self.timeout 377 | 378 | cached._cache_instance = self._cache_instance 379 | cached._cache_alias = self._cache_alias 380 | return cached 381 | 382 | def __get__(self, instance, klass): 383 | cached = self._clone() 384 | 385 | if cached.as_property and instance is None and klass is not None: 386 | # special case – calling property as class 387 | # attr means that we want to run invalidation, so we out of any scope 388 | return cached 389 | 390 | if instance: 391 | cached.instance = instance 392 | if klass: 393 | cached.klass = klass 394 | 395 | if cached.as_property and instance is not None: 396 | return cached() 397 | 398 | return cached 399 | 400 | def get_cached_value(self, cache_key): 401 | logger.debug('Get cache_key="%s"', cache_key) 402 | return self.cache_instance.get(cache_key, NOT_FOUND) 403 | 404 | def set_cached_value(self, cache_key, callable_meta, **extra): 405 | timeout = self.get_timeout(callable_meta) 406 | 407 | if timeout is not DEFAULT_TIMEOUT: 408 | 
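Timeouts do not have to be constants: `get_timeout` above resolves anything that is not an int through `_format`, so a callable receiving the call arguments can pick the TTL per call, which is what the test suite does with its `choose_timeout` helper. A minimal sketch with illustrative names:

```python
from easy_cache import ecached

def timeout_for(region, n):
    # cache heavier requests for longer
    return 3600 if n > 1000 else 60

@ecached('report:{region}:{n}', timeout=timeout_for)
def build_report(region, n):
    return sum(range(n))
```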
extra['timeout'] = timeout 409 | 410 | logger.debug('Set cache_key="%s" timeout="%s"', cache_key, extra.get('timeout')) 411 | self.cache_instance.set(cache_key, callable_meta.returned_value, **extra) 412 | 413 | @staticmethod 414 | def _check_if_meta_required(callable_template): 415 | """ 416 | Checks if we need to provide `meta` arg into cache key constructor, 417 | there are two way to get this right. 418 | 419 | 1. Use single `meta` argument: 420 | 421 | def construct_key(meta): 422 | ... 423 | 424 | 2. User `meta_accepted` decorator: 425 | 426 | from easy_cache import meta_accepted 427 | 428 | @meta_accepted 429 | def construct_key(m): 430 | ... 431 | 432 | """ 433 | if getattr(callable_template, META_ACCEPTED_ATTR, False): 434 | return True 435 | 436 | arg_spec = getargspec(callable_template) 437 | 438 | if (arg_spec.varargs is None and 439 | arg_spec.keywords is None and 440 | arg_spec and arg_spec.args[0] == META_ARG_NAME): 441 | return True 442 | 443 | return False 444 | 445 | def _format(self, template, meta): 446 | if isinstance(template, (staticmethod, classmethod)): 447 | template = template.__func__ 448 | 449 | if isinstance(template, abc.Callable): 450 | if self._check_if_meta_required(template): 451 | return template(meta) 452 | else: 453 | return template(*meta.args, **meta.kwargs) 454 | 455 | if not self.function: 456 | return template 457 | 458 | try: 459 | if isinstance(template, str): 460 | return force_text(template).format(**meta.call_args) 461 | elif isinstance(template, (list, tuple, set)): 462 | return [force_text(t).format(**meta.call_args) for t in template] 463 | except KeyError as ex: 464 | raise ValueError('Parameter "%s" is required for "%s"' % (ex, template)) 465 | 466 | raise TypeError( 467 | 'Unsupported type for key template: {!r}'.format(type(template)) 468 | ) 469 | 470 | def collect_meta(self, args, kwargs, returned_value=NOT_SET): 471 | """ :returns: MetaCallable """ 472 | args, kwargs = self.update_arguments(args, kwargs) 473 | 474 | meta = MetaCallable(args=args, kwargs=kwargs, returned_value=returned_value) 475 | 476 | if not self.function: 477 | return meta 478 | 479 | # default arguments are also passed to template function 480 | arg_spec = getargspec(self.function) 481 | diff_count = len(arg_spec.args) - len(args) 482 | 483 | # do not provide default arguments which were already passed 484 | if diff_count > 0 and arg_spec.defaults: 485 | # take minimum here 486 | diff_count = min(len(arg_spec.defaults), diff_count) 487 | default_kwargs = dict(zip(arg_spec.args[-diff_count:], 488 | arg_spec.defaults[-diff_count:])) 489 | else: 490 | default_kwargs = {} 491 | 492 | default_kwargs.update(kwargs) 493 | meta.kwargs = default_kwargs 494 | meta.function = self.function 495 | meta.scope = self.scope 496 | 497 | try: 498 | signature = inspect.signature(self.function) 499 | bound_args = signature.bind(*args, **kwargs).arguments 500 | bound_args.update(default_kwargs) 501 | meta.call_args = bound_args 502 | except TypeError: 503 | # sometimes not all required parameters are provided, just ignore them 504 | meta.call_args = meta.kwargs 505 | return meta 506 | 507 | def generate_cache_key(self, callable_meta): 508 | return self._format(self.cache_key_template, callable_meta) 509 | 510 | def invalidate_cache_by_key(self, *args, **kwargs): 511 | callable_meta = self.collect_meta(args, kwargs) 512 | cache_key = self.generate_cache_key(callable_meta) 513 | return self.cache_instance.delete(cache_key) 514 | 515 | def refresh_cache(self, *args, **kwargs): 516 | 
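In practice the two variants described in the docstring above look like this: either name the only argument `meta`, or mark any callable with `@meta_accepted`. A small sketch mirroring `generate_key_based_on_meta` from the test suite; the function and parameter names are illustrative:

```python
from easy_cache import ecached, meta_accepted
from easy_cache.core import create_cache_key

@meta_accepted
def key_from_meta(meta):
    # meta behaves like a mapping over the bound call arguments
    return create_cache_key(meta['user_id'], meta['year'])

@ecached(key_from_meta, timeout=600)
def yearly_report(user_id, year=2020):
    return {'user': user_id, 'year': year}
```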
callable_meta = self.collect_meta(args, kwargs) 517 | cache_key = self.generate_cache_key(callable_meta) 518 | 519 | logger.debug('REFRESH cache_key="%s"', cache_key) 520 | value = self.function(*callable_meta.args, **callable_meta.kwargs) 521 | callable_meta.returned_value = value 522 | self.set_cached_value(cache_key, callable_meta) 523 | return value 524 | 525 | def __str__(self): 526 | return ( 527 | ''.format( 528 | get_function_path(self.function, self.scope), 529 | get_function_path(self.cache_key_template), 530 | self.timeout) 531 | ) 532 | 533 | def __repr__(self): 534 | try: 535 | return self.__str__() 536 | except (UnicodeEncodeError, UnicodeDecodeError): 537 | return '[Bad Unicode data]' 538 | 539 | 540 | class TaggedCached(Cached): 541 | """ Cache with tags and prefix support """ 542 | 543 | def __init__(self, 544 | function, 545 | cache_key=None, 546 | timeout=None, 547 | cache_instance=None, 548 | cache_alias=None, 549 | as_property=False, 550 | tags=(), 551 | prefix=None): 552 | 553 | super(TaggedCached, self).__init__( 554 | function=function, 555 | cache_key=cache_key, 556 | cache_instance=cache_instance, 557 | cache_alias=cache_alias, 558 | timeout=timeout, 559 | as_property=as_property, 560 | ) 561 | assert tags or prefix 562 | self.tags = tags 563 | self.prefix = prefix 564 | 565 | if self._cache_instance: 566 | self._cache_instance = TaggedCacheProxy(self.cache_instance) 567 | 568 | if LAZY_MODE: 569 | @property 570 | def cache_instance(self): 571 | if self._cache_instance is None: 572 | return TaggedCacheProxy(caches[self._cache_alias]) 573 | return self._cache_instance 574 | else: 575 | @property 576 | def cache_instance(self): 577 | if self._cache_instance is None: 578 | self._cache_instance = TaggedCacheProxy(caches[self._cache_alias]) 579 | return self._cache_instance 580 | 581 | def _clone(self, **kwargs): 582 | return super(TaggedCached, self)._clone(tags=self.tags, prefix=self.prefix) 583 | 584 | def invalidate_cache_by_tags(self, tags=(), *args, **kwargs): 585 | """ Invalidate cache for this method or property by one of provided tags 586 | :type tags: str | list | tuple | callable 587 | """ 588 | if not self.tags: 589 | raise ValueError('Tags were not specified, nothing to invalidate') 590 | 591 | def to_set(obj): 592 | return set([obj] if isinstance(obj, str) else obj) 593 | 594 | callable_meta = self.collect_meta(args, kwargs) 595 | all_tags = to_set(self._format(self.tags, callable_meta)) 596 | 597 | if not tags: 598 | tags = all_tags 599 | else: 600 | tags = to_set(self._format(tags, callable_meta)) 601 | if all_tags: 602 | tags &= all_tags 603 | 604 | return self.cache_instance.invalidate(tags) 605 | 606 | def invalidate_cache_by_prefix(self, *args, **kwargs): 607 | if not self.prefix: 608 | raise ValueError('Prefix was not specified, nothing to invalidate') 609 | 610 | callable_meta = self.collect_meta(args, kwargs) 611 | prefix = self._format(self.prefix, callable_meta) 612 | return self.cache_instance.invalidate([prefix]) 613 | 614 | def generate_cache_key(self, callable_meta): 615 | cache_key = super(TaggedCached, self).generate_cache_key(callable_meta) 616 | if self.prefix: 617 | prefix = self._format(self.prefix, callable_meta) 618 | cache_key = create_cache_key(prefix, cache_key) 619 | return cache_key 620 | 621 | def set_cached_value(self, cache_key, callable_meta, **extra): 622 | # generate tags and prefix only after successful execution 623 | tags = self._format(self.tags, callable_meta) 624 | 625 | if self.prefix: 626 | prefix = 
self._format(self.prefix, callable_meta) 627 | tags = set(tags) | {prefix} 628 | 629 | return super(TaggedCached, self).set_cached_value(cache_key, callable_meta, tags=tags) 630 | 631 | def __str__(self): 632 | return str( 633 | ''.format( 635 | get_function_path(self.function, self.scope), 636 | get_function_path(self.cache_key_template), 637 | get_function_path(self.tags), 638 | get_function_path(self.prefix), 639 | self.timeout) 640 | ) 641 | -------------------------------------------------------------------------------- /easy_cache/decorators.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from collections import abc 3 | from functools import update_wrapper 4 | 5 | from easy_cache.core import Cached, TaggedCached, DEFAULT_TIMEOUT, META_ACCEPTED_ATTR 6 | 7 | 8 | # noinspection PyPep8Naming 9 | class ecached(object): 10 | """ Caches result of decorated callable. 11 | Possible use-cases are: 12 | 13 | @cached() 14 | def func(...): 15 | 16 | @cached('cache_key') # cache key only 17 | def func(...): 18 | 19 | @cached('cache_key', 300) # cache key and timeout in seconds 20 | def func(...): 21 | 22 | @cached('cache_key', 300, ('user', 'books')) # + tags 23 | def func(...): 24 | 25 | @cached('{a}:{b}') 26 | def func(a, b): # cache keys based on method parameters 27 | 28 | @cached(['a', 'b']) 29 | def func(a, b): # cache keys based on method parameters 30 | 31 | @cached(callable_with_parameters) 32 | def func(a, b): # cache_key = callable_with_parameters(a, b) 33 | 34 | """ 35 | def __init__(self, cache_key=None, timeout=DEFAULT_TIMEOUT, tags=(), prefix=None, 36 | cache_instance=None, cache_alias=None): 37 | if tags or prefix: 38 | self.cache = TaggedCached( 39 | function=None, 40 | cache_key=cache_key, 41 | tags=tags, 42 | timeout=timeout, 43 | prefix=prefix, 44 | cache_instance=cache_instance, 45 | cache_alias=cache_alias, 46 | ) 47 | else: 48 | self.cache = Cached( 49 | function=None, 50 | cache_key=cache_key, 51 | timeout=timeout, 52 | cache_instance=cache_instance, 53 | cache_alias=cache_alias, 54 | ) 55 | 56 | self._instance = None 57 | self._class = None 58 | self._func = None 59 | self._wrapped = False 60 | 61 | def __get__(self, instance, owner): 62 | self._instance = instance 63 | self._class = owner 64 | return self.wrapper() 65 | 66 | def wrapper(self): 67 | if not self._wrapped: 68 | if self._instance or self._class: 69 | wrapped = self._func.__get__(self._instance, self._class) 70 | 71 | if isinstance(self._func, staticmethod): 72 | # we don't need instance or class, however we need scope 73 | self.cache.scope = self._instance or self._class 74 | self._instance = None 75 | self._class = None 76 | else: 77 | wrapped = wrapped.__func__ 78 | else: 79 | wrapped = self._func 80 | 81 | update_wrapper(self.cache, wrapped) 82 | self.cache.function = wrapped 83 | self.cache.instance = self._instance 84 | self.cache.klass = self._class 85 | self._wrapped = True 86 | 87 | return self.cache 88 | 89 | def __call__(self, func): 90 | self._func = func 91 | 92 | if isinstance(func, abc.Callable): 93 | return self.wrapper() 94 | 95 | return self 96 | 97 | def __repr__(self): 98 | return repr(self.cache) 99 | 100 | 101 | def ecached_property(cache_key=None, timeout=DEFAULT_TIMEOUT, tags=(), prefix=None, 102 | cache_instance=None, cache_alias=None): 103 | """ Works the same as `cached` decorator, but intended to use 104 | for properties, e.g.: 105 | 106 | class User(object): 107 | 108 | @cached_property('{self.id}:friends_count', 
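Every decorated callable exposes the invalidation helpers defined on `Cached`/`TaggedCached` above; they take the same arguments as the original call so the key, tags and prefix can be rebuilt (the tag and prefix variants are only available when tags or a prefix were configured). A sketch with illustrative names:

```python
from easy_cache import ecached

@ecached('books:{author_id}', timeout=300, tags=['books'], prefix='shop')
def books_by_author(author_id):
    return ['book-{}'.format(author_id)]

books_by_author(42)                                    # computed and cached
books_by_author.invalidate_cache_by_key(42)            # drop this exact key
books_by_author.invalidate_cache_by_tags('books', 42)  # invalidate everything tagged 'books'
books_by_author.invalidate_cache_by_prefix(42)         # invalidate the whole 'shop' prefix
books_by_author.refresh_cache(42)                      # recompute and overwrite the cached value
```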
120) 109 | def friends_count(self): 110 | return 111 | 112 | """ 113 | def wrapper(func): 114 | if tags or prefix: 115 | cache = TaggedCached( 116 | function=func, 117 | cache_key=cache_key, 118 | tags=tags, 119 | timeout=timeout, 120 | prefix=prefix, 121 | cache_instance=cache_instance, 122 | cache_alias=cache_alias, 123 | as_property=True, 124 | ) 125 | else: 126 | cache = Cached( 127 | function=func, 128 | cache_key=cache_key, 129 | timeout=timeout, 130 | cache_instance=cache_instance, 131 | cache_alias=cache_alias, 132 | as_property=True, 133 | ) 134 | 135 | return cache 136 | 137 | return wrapper 138 | 139 | 140 | def meta_accepted(func): 141 | 142 | if isinstance(func, (staticmethod, classmethod)): 143 | _func = func.__func__ 144 | else: 145 | _func = func 146 | setattr(_func, META_ACCEPTED_ATTR, True) 147 | 148 | return func 149 | -------------------------------------------------------------------------------- /easy_cache/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | def get_function_path(function, bound_to=None): 5 | """Get received function path (as string), to import function later 6 | with `import_string`. 7 | """ 8 | if isinstance(function, str): 9 | return function 10 | 11 | # static and class methods 12 | if hasattr(function, '__func__'): 13 | real_function = function.__func__ 14 | elif callable(function): 15 | real_function = function 16 | else: 17 | return function 18 | 19 | func_path = [] 20 | 21 | module = getattr(real_function, '__module__', '__main__') 22 | if module: 23 | func_path.append(module) 24 | 25 | if not bound_to: 26 | try: 27 | bound_to = function.__self__ 28 | except AttributeError: 29 | pass 30 | 31 | if bound_to: 32 | if isinstance(bound_to, type): 33 | func_path.append(bound_to.__name__) 34 | else: 35 | func_path.append(bound_to.__class__.__name__) 36 | func_path.append(real_function.__name__) 37 | else: 38 | # qualname is available in Python 3 only 39 | func_path.append(getattr(real_function, '__qualname__', real_function.__name__)) 40 | 41 | return '.'.join(func_path) 42 | 43 | 44 | class cached_property(object): 45 | """A property that is only computed once per instance and then replaces 46 | itself with an ordinary attribute. Deleting the attribute resets the 47 | property. 
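When no `cache_key` is given, `get_function_path` above is what becomes the key prefix: module path, owning class (if any) and function name, followed by the call arguments with defaults included. Roughly, for an illustrative module `myapp.services`:

```python
from easy_cache import ecached

# myapp/services.py
@ecached()  # no explicit cache key
def total_price(order_id, discount=0):
    return 100 * order_id - discount

total_price(7)
# cached under something like:  myapp.services.total_price:7:0
```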
48 | 49 | Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 50 | """ 51 | 52 | def __init__(self, func): 53 | self.__doc__ = getattr(func, '__doc__') 54 | self.func = func 55 | 56 | def __get__(self, obj, cls): 57 | if obj is None: 58 | # We're being accessed from the class itself, not from an object 59 | return self 60 | value = obj.__dict__[self.func.__name__] = self.func(obj) 61 | return value 62 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = easy_cache/_version.py 5 | tag_prefix = 6 | 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import io 3 | 4 | from setuptools import setup, find_packages 5 | import versioneer 6 | 7 | 8 | def get_long_description(): 9 | with io.open('./README.md', encoding='utf-8') as f: 10 | readme = f.read() 11 | return readme 12 | 13 | 14 | tests_require = [ 15 | 'pytest', 16 | 'Django', 17 | 'django-redis', 18 | 'memory-profiler', 19 | 'mock', 20 | 'psutil', 21 | 'python-memcached', 22 | 'pymemcache', 23 | 'redis', 24 | 'pylibmc', 25 | 'tox-pyenv', 26 | ] 27 | 28 | 29 | setup( 30 | name='easy-cache', 31 | packages=find_packages(exclude=('tests', )), 32 | version=versioneer.get_version(), 33 | cmdclass=versioneer.get_cmdclass(), 34 | description='Useful cache decorators for methods and properties', 35 | author='Oleg Churkin', 36 | author_email='bahusoff@gmail.com', 37 | url='https://github.com/Bahus/easy_cache', 38 | keywords=['cache', 'decorator', 'invalidation', 39 | 'memcached', 'redis', 'django'], 40 | platforms='Platform Independent', 41 | license='MIT', 42 | classifiers=[ 43 | 'Development Status :: 5 - Production/Stable', 44 | 'Environment :: Web Environment', 45 | 'Operating System :: OS Independent', 46 | 'Programming Language :: Python', 47 | 'Programming Language :: Python :: 3.7', 48 | 'Programming Language :: Python :: 3.8', 49 | 'Programming Language :: Python :: 3.9', 50 | 'Programming Language :: Python :: 3.10', 51 | 'Topic :: Software Development :: Libraries :: Python Modules', 52 | 'Intended Audience :: Developers', 53 | 'License :: OSI Approved :: MIT License' 54 | ], 55 | long_description=get_long_description(), 56 | long_description_content_type='text/markdown', 57 | tests_require=tests_require, 58 | extras_require={ 59 | 'tests': tests_require, 60 | }, 61 | ) 62 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | -------------------------------------------------------------------------------- /tests/benchmarks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import math 3 | from contextlib import contextmanager 4 | from timeit import default_timer 5 | from redis import StrictRedis 6 | 7 | from django.conf import settings 8 | 9 | from easy_cache import caches 10 | from easy_cache.contrib.redis_cache import RedisCacheInstance 11 | from easy_cache.decorators import ecached 12 | 13 | from tests.conf import REDIS_HOST, MEMCACHED_HOST 14 | 15 | settings.configure( 16 | DEBUG=True, 17 | DATABASES={ 18 | 'default': { 19 | 'ENGINE': 
'django.db.backends.sqlite3', 20 | 'NAME': ':memory:' 21 | } 22 | }, 23 | CACHES={ 24 | 'default': { 25 | 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 26 | 'LOCATION': 'locmem', 27 | 'KEY_PREFIX': 'custom_prefix', 28 | }, 29 | 'memcached': { 30 | 'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache', 31 | 'LOCATION': MEMCACHED_HOST, 32 | 'KEY_PREFIX': 'memcached', 33 | }, 34 | 'redis': { 35 | 'BACKEND': 'django_redis.cache.RedisCache', 36 | 'LOCATION': 'redis://{}/1'.format(REDIS_HOST), 37 | 'OPTIONS': { 38 | 'CLIENT_CLASS': 'django_redis.client.DefaultClient', 39 | } 40 | } 41 | }, 42 | ROOT_URLCONF='', 43 | INSTALLED_APPS=() 44 | ) 45 | 46 | 47 | # adds custom redis client 48 | redis_host, redis_port = REDIS_HOST.split(':') 49 | caches['redis_client'] = RedisCacheInstance( 50 | StrictRedis(host=redis_host, port=redis_port), 51 | prefix='bench' 52 | ) 53 | 54 | 55 | def ratio(a, b): 56 | if a > b: 57 | return a / b, 1 58 | elif a < b: 59 | return 1, b / a 60 | else: 61 | return 1, 1 62 | 63 | 64 | class Stopwatch(object): 65 | 66 | def __init__(self, name): 67 | self.name = name 68 | self.t0 = default_timer() 69 | self.laps = [] 70 | 71 | def __str__(self): 72 | m = self.mean() 73 | d = self.stddev() 74 | a = self.median() 75 | fmt = u'%-37s: mean=%0.5f, median=%0.5f, stddev=%0.5f, n=%3d, snr=%8.5f:%8.5f' 76 | return fmt % ((self.name, m, a, d, len(self.laps)) + ratio(m, d)) 77 | 78 | def mean(self): 79 | return sum(self.laps) / len(self.laps) 80 | 81 | def median(self): 82 | return sorted(self.laps)[int(len(self.laps) / 2)] 83 | 84 | def stddev(self): 85 | mean = self.mean() 86 | return math.sqrt(sum((lap - mean) ** 2 for lap in self.laps) / len(self.laps)) 87 | 88 | def total(self): 89 | return default_timer() - self.t0 90 | 91 | def reset(self): 92 | self.t0 = default_timer() 93 | self.laps = [] 94 | 95 | @contextmanager 96 | def timing(self): 97 | t0 = default_timer() 98 | try: 99 | yield 100 | finally: 101 | te = default_timer() 102 | self.laps.append(te - t0) 103 | 104 | c = 0 105 | 106 | 107 | def time_consuming_operation(): 108 | global c 109 | c += 1 110 | a = sum(range(1000000)) 111 | return str(a) 112 | 113 | 114 | def test_no_cache(): 115 | return time_consuming_operation() 116 | 117 | 118 | @ecached(cache_alias='default') 119 | def test_locmem_cache(): 120 | return time_consuming_operation() 121 | 122 | 123 | @ecached(cache_alias='memcached') 124 | def test_memcached_cache(): 125 | return time_consuming_operation() 126 | 127 | 128 | @ecached(cache_alias='redis') 129 | def test_redis_cache(): 130 | return time_consuming_operation() 131 | 132 | 133 | @ecached(cache_alias='redis_client') 134 | def test_redis_client_cache(): 135 | return time_consuming_operation() 136 | 137 | 138 | @ecached(cache_alias='default', tags=['tag1', 'tag2']) 139 | def test_locmem_cache_tags(): 140 | return time_consuming_operation() 141 | 142 | 143 | @ecached(cache_alias='memcached', tags=['tag1', 'tag2']) 144 | def test_memcached_cache_tags(): 145 | return time_consuming_operation() 146 | 147 | 148 | @ecached(cache_alias='redis', tags=['tag1', 'tag2']) 149 | def test_redis_cache_tags(): 150 | return time_consuming_operation() 151 | 152 | 153 | @ecached(cache_alias='redis_client', tags=['tag1', 'tag2']) 154 | def test_redis_client_cache_tags(): 155 | return time_consuming_operation() 156 | 157 | 158 | def main(): 159 | from django import get_version 160 | import sys 161 | 162 | print('=======', 'Python:', sys.version.replace('\n', ''), 'Django:', get_version(), '=======') 163 
| 164 | global c 165 | n = 100 166 | 167 | benchmarks = ( 168 | (test_no_cache, n), 169 | (test_locmem_cache, 1), 170 | (test_locmem_cache_tags, 1), 171 | (test_memcached_cache, 1), 172 | (test_memcached_cache_tags, 1), 173 | (test_redis_cache, 1), 174 | (test_redis_cache_tags, 1), 175 | (test_redis_client_cache, 1), 176 | (test_redis_client_cache_tags, 1), 177 | ) 178 | 179 | def cleanup(function): 180 | if hasattr(function, 'invalidate_cache_by_key'): 181 | function.invalidate_cache_by_key() 182 | if hasattr(function, 'invalidate_cache_by_tags'): 183 | function.invalidate_cache_by_tags() 184 | 185 | for method, count in benchmarks: 186 | sw1 = Stopwatch('[cleanup] ' + method.__name__) 187 | cleanup(method) 188 | c = 0 189 | 190 | for _ in range(n): 191 | with sw1.timing(): 192 | method() 193 | cleanup(method) 194 | 195 | assert c == n, c 196 | print(sw1) 197 | 198 | sw2 = Stopwatch('[ normal] ' + method.__name__) 199 | cleanup(method) 200 | c = 0 201 | 202 | for _ in range(n): 203 | # skip first time 204 | if _ == 0: 205 | method() 206 | continue 207 | with sw2.timing(): 208 | method() 209 | 210 | assert c == count, c 211 | print(sw2) 212 | print('mean diff: {:.3} %, median diff: {:.3} %'.format( 213 | float(sw2.mean()) / sw1.mean() * 100, 214 | float(sw2.median()) / sw1.median() * 100, 215 | )) 216 | 217 | 218 | if __name__ == '__main__': 219 | main() 220 | -------------------------------------------------------------------------------- /tests/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Tests configuration options 4 | """ 5 | import os 6 | 7 | # forced to be enabled in tests, since we need to change cache instance type dynamically 8 | os.environ['EASY_CACHE_LAZY_MODE_ENABLE'] = 'yes' 9 | 10 | # if enabled, you'll see additional logging from cache classes 11 | DEBUG = os.environ.get('EASY_CACHE_DEBUG') == 'yes' 12 | 13 | # host:port used in redis-live tests, see readme for docker commands 14 | REDIS_HOST = os.environ.get('EASY_CACHE_REDIS_HOST', '0.0.0.0:6379') 15 | 16 | # host:port used in memcached-live tests, see readme for docker commands 17 | MEMCACHED_HOST = os.environ.get('EASY_CACHE_MEMCACHED_HOST', '0.0.0.0:11211') 18 | -------------------------------------------------------------------------------- /tests/tests_basic.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from collections import abc 3 | import logging 4 | import random 5 | import sys 6 | 7 | import django 8 | 9 | from functools import partial 10 | from mock import Mock 11 | from unittest import TestCase, skipIf 12 | 13 | from tests.conf import DEBUG, REDIS_HOST, MEMCACHED_HOST 14 | 15 | from easy_cache import ecached, ecached_property, meta_accepted 16 | from easy_cache import ( 17 | set_global_cache_instance, 18 | invalidate_cache_key, 19 | invalidate_cache_tags, 20 | invalidate_cache_prefix, 21 | get_default_cache_instance, 22 | ) 23 | from easy_cache.core import ( 24 | create_cache_key, 25 | create_tag_cache_key, 26 | DEFAULT_TIMEOUT, 27 | MetaCallable, 28 | ) 29 | from easy_cache.compat import force_text, getargspec 30 | 31 | cache_mock = Mock() 32 | 33 | 34 | class MethodProxy(object): 35 | log = logging.getLogger('method_proxy') 36 | log.setLevel(logging.DEBUG) 37 | log.addHandler(logging.StreamHandler(sys.stdout)) 38 | 39 | def __init__(self, method_name, method, ref): 40 | self.method_name = method_name 41 | self.method = method 42 | self.ref = ref 43 | 44 | 
def __call__(self, *args, **kwargs): 45 | self.log.info('[%r] Cache-%s: args=%r, kwargs=%r', self.ref, self.method_name, args, kwargs) 46 | return self.method(*args, **kwargs) 47 | 48 | 49 | # noinspection PyProtectedMember 50 | class CacheProxy(object): 51 | 52 | def __init__(self, cache_instance, debug=False): 53 | """ :type cache_instance: django.core.cache.backends.locmem.LocMemCache|dict""" 54 | self._debug = debug 55 | self._cache = cache_instance 56 | self._timeouts = {} 57 | 58 | @property 59 | def is_dict(self): 60 | return isinstance(self._cache, dict) 61 | 62 | @property 63 | def is_locmem(self): 64 | return isinstance(getattr(self._cache, '_cache', None), abc.MutableMapping) 65 | 66 | @property 67 | def is_memcache(self): 68 | try: 69 | # noinspection PyUnresolvedReferences 70 | from memcache import Client 71 | except ImportError: 72 | return False 73 | 74 | return isinstance(getattr(self._cache, '_cache', None), Client) 75 | 76 | @property 77 | def is_pymemcache(self): 78 | try: 79 | # noinspection PyUnresolvedReferences 80 | from pymemcache.client.hash import HashClient 81 | except ImportError: 82 | return False 83 | 84 | return isinstance(getattr(self._cache, '_cache', None), HashClient) 85 | 86 | @property 87 | def is_pylibmc(self): 88 | try: 89 | # noinspection PyUnresolvedReferences 90 | from pylibmc import Client 91 | except ImportError: 92 | return False 93 | 94 | return isinstance(getattr(self._cache, '_cache', None), Client) 95 | 96 | @property 97 | def is_redis(self): 98 | try: 99 | from django_redis.client import DefaultClient 100 | except ImportError: 101 | return False 102 | return isinstance(getattr(self._cache, 'client', None), DefaultClient) 103 | 104 | def __getattribute__(self, item): 105 | value = object.__getattribute__(self, item) 106 | if callable(value) and self._debug: 107 | return MethodProxy(item, value, self) 108 | return value 109 | 110 | def set(self, key, value, timeout=DEFAULT_TIMEOUT): 111 | self._timeouts[key] = timeout 112 | 113 | if timeout is DEFAULT_TIMEOUT: 114 | timeout = None 115 | if self.is_dict: 116 | self._cache[key] = value 117 | else: 118 | self._cache.set(key, value, timeout) 119 | 120 | def get(self, key, default=None): 121 | return self._cache.get(key, default) 122 | 123 | def delete(self, key): 124 | if self.is_dict: 125 | if key in self._cache: 126 | del self._cache[key] 127 | del self._timeouts[key] 128 | else: 129 | del self._timeouts[key] 130 | self._cache.delete(key) 131 | 132 | def set_many(self, data, timeout=DEFAULT_TIMEOUT): 133 | self._timeouts.update({k: timeout for k in data}) 134 | 135 | if timeout is DEFAULT_TIMEOUT: 136 | timeout = None 137 | 138 | if self.is_dict: 139 | self._cache.update(data) 140 | else: 141 | self._cache.set_many(data, timeout) 142 | 143 | def get_many(self, data): 144 | if self.is_dict: 145 | return {k: self.get(k) for k in data if k in self} 146 | else: 147 | return self._cache.get_many(data) 148 | 149 | def make_key(self, key, *args, **kwargs): 150 | if self.is_dict: 151 | return key 152 | return self._cache.make_key(key, *args, **kwargs) 153 | 154 | def get_timeout(self, key): 155 | return self._timeouts[key] 156 | 157 | def clear(self): 158 | self._cache.clear() 159 | self._timeouts.clear() 160 | 161 | def search_prefix(self, prefix): 162 | keys_list = self.get_all_keys() 163 | 164 | actual_prefix = prefix 165 | 166 | if self.is_locmem: 167 | # using real keys only for locmem cache 168 | actual_prefix = self.with_key_prefix(force_text(prefix)) 169 | 170 | for key in keys_list: 171 | # 
force all keys to be unicode, since not all cache backends support it 172 | key = force_text(key) 173 | if key.startswith(force_text(actual_prefix)): 174 | return True 175 | return False 176 | 177 | def get_all_keys(self): 178 | if self.is_dict: 179 | return self._cache.keys() 180 | elif self.is_locmem: 181 | return self._cache._cache.keys() 182 | elif self.is_redis: 183 | # noinspection PyUnresolvedReferences 184 | return self._cache.client.keys('*') 185 | 186 | # fallback to saved keys, since there are some different problems 187 | # when receiving cache keys from memcached: 188 | # python-memcached - get_stats fails in Python3 189 | # pylibmc - get_stats does not work as expected 190 | return self._timeouts.keys() 191 | 192 | def with_key_prefix(self, value=''): 193 | if self.is_memcache or self.is_pylibmc or self.is_pymemcache: 194 | return self._cache.key_func(value, self._cache.key_prefix, self._cache.version) 195 | return '' 196 | 197 | def __len__(self): 198 | if self.is_dict: 199 | return len(self._cache) 200 | elif self.is_locmem: 201 | return len(self._cache._cache) 202 | elif self.is_memcache or self.is_pylibmc or self.is_pymemcache: 203 | # special case 204 | keys = self.get_all_keys() 205 | # prefix = self.with_key_prefix() 206 | # keys = [k[len(prefix):] for k in keys] 207 | return len(self._cache.get_many(keys)) 208 | elif self.is_redis: 209 | return len(self.get_all_keys()) 210 | return 0 211 | 212 | def __contains__(self, item): 213 | if self.is_dict: 214 | return item in self._cache 215 | else: 216 | return self._cache.has_key(item) 217 | 218 | def __repr__(self): 219 | name = type(self._cache) 220 | try: 221 | from django.core.cache import DEFAULT_CACHE_ALIAS, caches, DefaultCacheProxy 222 | if isinstance(self._cache, DefaultCacheProxy): 223 | name = type(caches[DEFAULT_CACHE_ALIAS]) 224 | except Exception: 225 | pass 226 | return 'ThreadLocalCache {}'.format(name) 227 | 228 | 229 | def custom_cache_key(*args, **kwargs): 230 | return create_cache_key('my_prefix', args[0].id, *args[1:]) 231 | 232 | 233 | def process_args(*args, **kwargs): 234 | final = list(args) 235 | for k, v in sorted(kwargs.items()): 236 | final.append(k) 237 | final.append(v) 238 | 239 | return ':'.join(force_text(i) for i in final) 240 | 241 | 242 | def get_test_result(*args, **kwargs): 243 | result = process_args(*args, **kwargs) 244 | cache_mock(result) 245 | return result 246 | 247 | 248 | def choose_timeout(self, a, b, c): 249 | if not isinstance(a, int): 250 | return DEFAULT_TIMEOUT 251 | return a * 100 252 | 253 | 254 | # noinspection PyNestedDecorators 255 | class User(object): 256 | name = 'user_name' 257 | prefixed_ecached = partial(ecached, prefix='USER:{self.id}', timeout=3600) 258 | 259 | def __init__(self, uid): 260 | self.id = uid 261 | 262 | @ecached('dyn_timeout:{a}', timeout=choose_timeout) 263 | def instance_dynamic_timeout(self, a, b, c): 264 | return get_test_result(a, b, c) 265 | 266 | @ecached() 267 | def instance_default_cache_key(self, a, b, c=8): 268 | return get_test_result(a, b, c) 269 | 270 | @ecached() 271 | @classmethod 272 | def class_method_default_cache_key(cls, a, b, c=9, d='HAHA'): 273 | return get_test_result(a, b, c) 274 | 275 | @ecached_property() 276 | def test_property(self): 277 | return get_test_result('property') 278 | 279 | @ecached('{self.id}:{a}:{b}:{c}') 280 | def instance_method_string(self, a, b, c=10): 281 | return get_test_result(a, b, c) 282 | 283 | @ecached(['self.id', 'a', 'b']) 284 | def instance_method_list(self, a, b, c=11): 285 | return 
get_test_result(a, b, c) 286 | 287 | @ecached(custom_cache_key) 288 | def instance_method_callable(self, a, b, c=12): 289 | return get_test_result(a, b, c) 290 | 291 | @ecached('{self.id}:{a}:{b}', 400) 292 | def instance_method_timeout(self, a, b, c=13): 293 | return get_test_result(a, b, c) 294 | 295 | @ecached('{self.id}:{a}:{b}', 500, ('tag1', 'tag2')) 296 | def instance_method_tags(self, a, b, c=14): 297 | return get_test_result(a, b, c) 298 | 299 | @staticmethod 300 | def generate_custom_tags(meta): 301 | """ :type meta: MetaCallable """ 302 | if meta.has_returned_value: 303 | cache_mock.assert_called_with(meta.returned_value) 304 | 305 | self = meta.args[0] 306 | a = meta.args[1] 307 | return [create_cache_key(self.name, self.id, a), 'simple_tag'] 308 | 309 | @meta_accepted 310 | @staticmethod 311 | def generate_key_based_on_meta(m, a=1): 312 | assert isinstance(m, MetaCallable) 313 | assert m.function is getattr(m['self'], 'instance_method_meta_test').function 314 | assert m.scope is m['self'] 315 | assert a == 1 316 | 317 | return create_cache_key(m['a'], m['b'], m['c']) 318 | 319 | @ecached(generate_key_based_on_meta) 320 | def instance_method_meta_test(self, a, b, c=666): 321 | return get_test_result(a, b, c) 322 | 323 | @ecached('{a}:{b}', tags=generate_custom_tags) 324 | def instance_method_custom_tags(self, a, b, c=14): 325 | return get_test_result(a, b, c) 326 | 327 | @prefixed_ecached('p1:{a}:{b}:{c}', tags=['{self.id}:tag1']) 328 | def instance_method_prefixed(self, a, b, c=15): 329 | return get_test_result(a, b, c) 330 | 331 | @ecached_property('{self.id}:friends_count', timeout=100, prefix='USER_PROPERTY') 332 | def friends_count(self): 333 | cache_mock() 334 | return 15 335 | 336 | @ecached_property('{self.id}:friends', 666) 337 | def friends(self): 338 | cache_mock() 339 | return ['Ivan', 'Sergey', 'Semen'] 340 | 341 | @ecached_property('static_key') 342 | def property_no_tags(self): 343 | cache_mock() 344 | return '42' 345 | 346 | @ecached(cache_key='{cls.name}:{c}') 347 | @classmethod 348 | def class_method_cache_key_string(cls, a, b, c=17): 349 | return get_test_result(a, b, c) 350 | 351 | @ecached(('cls.name', 'a'), 500, ['tag4', 'tag5:{cls.name}'], 352 | prefix=lambda cls, *args, **kwargs: create_cache_key('USER', args[0], args[1])) 353 | @classmethod 354 | def class_method_full_spec(cls, a, b, c=18): 355 | return get_test_result(a, b, c) 356 | 357 | @ecached('{hg}:{hg}:{test}', prefix=u'пользователь') 358 | @staticmethod 359 | def static_method(hg, test='abc', n=1.1): 360 | return get_test_result(hg, test, n) 361 | 362 | @ecached(tags=['ttt:{c}'], prefix='ppp:{b}') 363 | @staticmethod 364 | def static_method_default_key(a, b, c=11): 365 | return get_test_result(a, b, c) 366 | 367 | 368 | @ecached(timeout=100) 369 | def computation(a, b, c): 370 | return get_test_result(a, b, c) 371 | 372 | 373 | @ecached(('kwargs[a]', 'kwargs[b]'), prefix=u'пользователь') 374 | def ordinal_func(*args, **kwargs): 375 | return get_test_result(*args, **kwargs) 376 | 377 | 378 | @ecached('second:{c}', timeout=450, tags=['{a}']) 379 | def second_func(a, b, c=100): 380 | return get_test_result(a, b, c) 381 | 382 | 383 | class ClassCachedDecoratorTest(TestCase): 384 | 385 | def get_cache_instance(self): 386 | return CacheProxy({}, DEBUG) 387 | 388 | def setUp(self): 389 | self.cache = cache_mock 390 | self.cache.reset_mock() 391 | self.user = User(random.randint(10, 1000)) 392 | 393 | self.local_cache = self.get_cache_instance() 394 | """ :type local_cache: CacheProxy """ 395 | 
set_global_cache_instance(self.local_cache) 396 | 397 | assert self.local_cache == get_default_cache_instance() 398 | 399 | def tearDown(self): 400 | self.local_cache.clear() 401 | self.cache.reset_mock() 402 | 403 | def _check_base(self, method, param_to_change=None): 404 | self.cache.reset_mock() 405 | 406 | items = [u'тест', 'str', 100, 1.45] 407 | random.shuffle(items) 408 | 409 | a, b, c = items[:3] 410 | 411 | result = process_args(a, b, c) 412 | self.assertEqual(method(a, b, c), result) 413 | self.cache.assert_called_once_with(result) 414 | self.cache.reset_mock() 415 | 416 | # cached version (force convert to unicode) 417 | self.assertEqual(force_text(method(a, b, c)), force_text(result)) 418 | self.assertFalse(self.cache.called) 419 | self.cache.reset_mock() 420 | 421 | if param_to_change == 'c': 422 | c = items[3] 423 | elif param_to_change == 'b': 424 | b = items[3] 425 | else: 426 | a = items[3] 427 | 428 | result = process_args(a, b, c) 429 | 430 | # different params, no cache 431 | self.assertEqual(method(a, b, c), result) 432 | self.cache.assert_called_once_with(result) 433 | self.cache.reset_mock() 434 | 435 | def _check_cache_key(self, _callable, cache_key, *args, **kwargs): 436 | invalidator = kwargs.pop('invalidator', None) 437 | 438 | self.local_cache.clear() 439 | self.assertNotIn(cache_key, self.local_cache) 440 | 441 | _callable(*args, **kwargs) 442 | self.assertIn(cache_key, self.local_cache) 443 | 444 | if invalidator: 445 | invalidator() 446 | else: 447 | _callable.invalidate_cache_by_key(*args, **kwargs) 448 | 449 | self.assertNotIn(cache_key, self.local_cache) 450 | _callable(*args, **kwargs) 451 | 452 | def _check_cache_prefix(self, _callable, prefix, *args, **kwargs): 453 | self.local_cache.clear() 454 | self.cache.reset_mock() 455 | 456 | as_property = getattr(_callable, 'property', False) 457 | 458 | tag_prefix = create_tag_cache_key(prefix) 459 | self.assertNotIn(tag_prefix, self.local_cache) 460 | 461 | result = _callable(*args, **kwargs) 462 | self.assertIn(tag_prefix, self.local_cache) 463 | self.assertTrue(self.local_cache.search_prefix(prefix)) 464 | 465 | if as_property: 466 | self.cache.assert_called_once_with() 467 | else: 468 | self.cache.assert_called_once_with(result) 469 | 470 | self.cache.reset_mock() 471 | 472 | _callable(*args, **kwargs) 473 | self.assertFalse(self.cache.called) 474 | 475 | _callable.invalidate_cache_by_prefix(*args, **kwargs) 476 | result = _callable(*args, **kwargs) 477 | 478 | if as_property: 479 | self.cache.assert_called_once_with() 480 | else: 481 | self.cache.assert_called_once_with(result) 482 | 483 | self.cache.reset_mock() 484 | 485 | def _check_timeout(self, cache_key, timeout): 486 | assert cache_key in self.local_cache, '_check_cache_key required to use this method' 487 | self.assertEqual(self.local_cache.get_timeout(cache_key), timeout) 488 | 489 | def _check_tags(self, _callable, tags, *args, **kwargs): 490 | self.local_cache.clear() 491 | self.cache.reset_mock() 492 | 493 | for tag in tags: 494 | self.assertNotIn(create_tag_cache_key(tag), self.local_cache) 495 | 496 | result = _callable(*args, **kwargs) 497 | 498 | for tag in tags: 499 | self.assertIn(create_tag_cache_key(tag), self.local_cache) 500 | 501 | self.cache.assert_called_once_with(result) 502 | self.cache.reset_mock() 503 | 504 | # invalidate by tag 505 | for tag in tags: 506 | invalidate_cache_tags(tag) 507 | result = _callable(*args, **kwargs) 508 | self.cache.assert_called_once_with(result) 509 | self.cache.reset_mock() 510 | 511 | 
_callable(*args, **kwargs) 512 | self.assertFalse(self.cache.called) 513 | 514 | _callable.invalidate_cache_by_tags(tag, *args, **kwargs) 515 | result = _callable(*args, **kwargs) 516 | self.cache.assert_called_once_with(result) 517 | self.cache.reset_mock() 518 | 519 | def test_default_cache_key(self): 520 | cache_callable = self.user.instance_default_cache_key 521 | cache_key = create_cache_key( 522 | __name__ + '.User.instance_default_cache_key', 1, 2, 8 523 | ) 524 | self._check_base(cache_callable) 525 | self._check_cache_key(cache_callable, cache_key, 1, 2) 526 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 527 | 528 | cache_callable = User.class_method_default_cache_key 529 | cache_key = create_cache_key( 530 | __name__ + '.User.class_method_default_cache_key', 2, 3, 9, 'HAHA' 531 | ) 532 | 533 | self._check_base(cache_callable) 534 | self._check_cache_key(cache_callable, cache_key, 2, 3) 535 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 536 | 537 | cache_callable = computation 538 | cache_key = create_cache_key( 539 | __name__ + '.computation', 'a', 'b', 'c' 540 | ) 541 | 542 | self._check_base(cache_callable) 543 | self._check_cache_key(cache_callable, cache_key, 'a', 'b', 'c') 544 | self._check_timeout(cache_key, 100) 545 | 546 | def test_default_cache_key_for_property(self): 547 | self.assertEqual(self.user.test_property, 'property') 548 | 549 | cache_callable = lambda: getattr(self.user, 'test_property') 550 | cache_key = create_cache_key(__name__ + '.User.test_property') 551 | 552 | self._check_cache_key(cache_callable, cache_key, 553 | invalidator=User.test_property.invalidate_cache_by_key) 554 | 555 | self.local_cache.clear() 556 | self.cache.reset_mock() 557 | 558 | self.assertEqual(self.user.test_property, 'property') 559 | self.cache.assert_called_once_with('property') 560 | self.cache.reset_mock() 561 | 562 | self.assertEqual(self.user.test_property, 'property') 563 | self.assertFalse(self.cache.called) 564 | 565 | # invalidate cache 566 | User.test_property.invalidate_cache_by_key() 567 | self.assertEqual(self.user.test_property, 'property') 568 | self.cache.assert_called_once_with('property') 569 | 570 | def test_cache_key_as_string(self): 571 | cache_callable = self.user.instance_method_string 572 | cache_key = create_cache_key(self.user.id, 1, 2, 3) 573 | 574 | self._check_base(self.user.instance_method_string) 575 | self._check_cache_key(cache_callable, cache_key, 1, 2, c=3) 576 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 577 | self.assertEqual(len(self.local_cache), 1) 578 | 579 | def test_cache_key_as_list(self): 580 | cache_callable = self.user.instance_method_list 581 | cache_key = create_cache_key(self.user.id, 2, 3) 582 | 583 | self._check_base(cache_callable) 584 | self._check_cache_key(cache_callable, cache_key, 2, 3) 585 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 586 | 587 | def test_cache_key_as_list_unrelated_param_changed(self): 588 | # if we change only "c" parameter - data will be received from cache 589 | a = b = c = 10 590 | result = process_args(a, b, c) 591 | self.assertEqual(self.user.instance_method_list(a, b, c), result) 592 | self.cache.assert_called_once_with(result) 593 | self.cache.reset_mock() 594 | 595 | # still cached version 596 | self.assertEqual(self.user.instance_method_list(a, b, c + 10), result) 597 | self.assertFalse(self.cache.called) 598 | self.cache.reset_mock() 599 | 600 | def test_cache_key_as_callable(self): 601 | cache_callable = self.user.instance_method_callable 602 | cache_key = 
custom_cache_key(self.user, 5, 5) 603 | 604 | self._check_base(cache_callable) 605 | self._check_cache_key(cache_callable, cache_key, 5, 5) 606 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 607 | 608 | def test_not_default_timeout(self): 609 | cache_callable = self.user.instance_method_timeout 610 | cache_key = create_cache_key(self.user.id, 5, 5) 611 | 612 | self._check_base(cache_callable) 613 | self._check_cache_key(cache_callable, cache_key, 5, 5) 614 | self._check_timeout(cache_key, 400) 615 | 616 | def test_cache_tags(self): 617 | cache_callable = self.user.instance_method_tags 618 | cache_key = create_cache_key(self.user.id, 5, 5) 619 | 620 | self._check_base(cache_callable) 621 | self._check_cache_key(cache_callable, cache_key, 5, 5) 622 | self._check_timeout(cache_key, 500) 623 | self._check_tags(cache_callable, ['tag1', 'tag2'], 6, 7) 624 | 625 | def test_cache_custom_tags(self): 626 | cache_callable = self.user.instance_method_custom_tags 627 | cache_key = create_cache_key(10, 11) 628 | cache_tags = self.user.generate_custom_tags(MetaCallable(args=(self.user, 10))) 629 | 630 | self._check_cache_key(cache_callable, cache_key, 10, 11) 631 | self._check_tags(cache_callable, cache_tags, 10, 11) 632 | 633 | def test_method_prefixed(self): 634 | cache_callable = self.user.instance_method_prefixed 635 | cache_prefix = create_cache_key('USER', self.user.id) 636 | 637 | # prefix should ba attached 638 | cache_key = create_cache_key(cache_prefix, 'p1', 1, 2, 3) 639 | 640 | self._check_base(cache_callable) 641 | self._check_cache_key(cache_callable, cache_key, 1, 2, 3) 642 | self._check_timeout(cache_key, 3600) 643 | 644 | # prefix is a tag actually 645 | self._check_cache_prefix(cache_callable, cache_prefix, 1, 2, 3) 646 | self._check_tags(cache_callable, [create_cache_key(self.user.id, 'tag1')], 1, 2, 3) 647 | 648 | def test_property_friends_count(self): 649 | self.assertEqual(self.user.friends_count, 15) 650 | cache_callable = lambda: getattr(self.user, 'friends_count') 651 | cache_callable.property = True 652 | cache_callable.invalidate_cache_by_prefix = User.friends_count.invalidate_cache_by_prefix 653 | 654 | cache_prefix = 'USER_PROPERTY' 655 | cache_key = create_cache_key(cache_prefix, self.user.id, 'friends_count') 656 | 657 | self._check_cache_key( 658 | cache_callable, 659 | cache_key, 660 | invalidator=partial(User.friends_count.invalidate_cache_by_key, self.user) 661 | ) 662 | self._check_timeout(cache_key, 100) 663 | # noinspection PyTypeChecker 664 | self._check_cache_prefix(cache_callable, cache_prefix) 665 | 666 | def test_property_friends(self): 667 | friends_list = ['Ivan', 'Sergey', 'Semen'] 668 | 669 | self.assertListEqual(self.user.friends, friends_list) 670 | 671 | self.cache.assert_called_once_with() 672 | self.cache.reset_mock() 673 | 674 | self.assertListEqual(self.user.friends, friends_list) 675 | self.cache.assert_not_called() 676 | self.cache.reset_mock() 677 | 678 | User.friends.invalidate_cache_by_key(self.user) 679 | self.assertListEqual(self.user.friends, friends_list) 680 | self.cache.assert_called_once_with() 681 | self.cache.reset_mock() 682 | 683 | cache_callable = lambda: getattr(self.user, 'friends') 684 | cache_callable.property = True 685 | cache_key = create_cache_key(self.user.id, 'friends') 686 | 687 | self._check_cache_key( 688 | cache_callable, 689 | cache_key, 690 | invalidator=partial(User.friends.invalidate_cache_by_key, self.user) 691 | ) 692 | self._check_timeout(cache_key, 666) 693 | 694 | def test_property_no_tags(self): 695 | 
self.assertEqual(self.user.property_no_tags, '42') 696 | 697 | cache_callable = lambda: getattr(self.user, 'property_no_tags') 698 | cache_key = create_cache_key('static_key') 699 | 700 | self._check_cache_key( 701 | cache_callable, 702 | cache_key, 703 | invalidator=partial(User.property_no_tags.invalidate_cache_by_key) 704 | ) 705 | 706 | def test_class_method_key_string(self): 707 | cache_callable = User.class_method_cache_key_string 708 | cache_key = create_cache_key(User.name, 17) 709 | 710 | self._check_base(cache_callable, param_to_change='c') 711 | self._check_cache_key(cache_callable, cache_key, 1, 2) 712 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 713 | 714 | cache_callable = self.user.class_method_cache_key_string 715 | self._check_base(cache_callable, param_to_change='c') 716 | self._check_cache_key(cache_callable, cache_key, 4, 5) 717 | 718 | def test_class_method_full_spec(self): 719 | cache_callable = User.class_method_full_spec 720 | a = u'a' 721 | b = u'b' 722 | c = 10 723 | 724 | cache_prefix = create_cache_key('USER', a, b) 725 | cache_key = create_cache_key(cache_prefix, User.name, a) 726 | 727 | self._check_base(cache_callable) 728 | self._check_cache_key(cache_callable, cache_key, a, b, c) 729 | self._check_timeout(cache_key, 500) 730 | self._check_tags( 731 | cache_callable, 732 | ['tag4', create_cache_key(u'tag5', User.name)], 733 | a, b, c 734 | ) 735 | self._check_cache_prefix(cache_callable, cache_prefix, a, b, c) 736 | 737 | def test_static_method(self): 738 | cache_callable = User.static_method 739 | hg = 123 740 | test = u'ЫЫЫЫ' 741 | 742 | cache_prefix = cache_callable.prefix 743 | cache_key = create_cache_key(cache_prefix, hg, hg, test) 744 | 745 | self._check_base(cache_callable) 746 | self._check_cache_key(cache_callable, cache_key, hg, test) 747 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 748 | self._check_cache_prefix(cache_callable, cache_prefix, hg, test) 749 | 750 | def test_static_method_default_key(self): 751 | cache_callable = User.static_method_default_key 752 | cache_prefix = create_cache_key('ppp', 2) 753 | cache_key = create_cache_key( 754 | cache_prefix, __name__ + '.User.static_method_default_key', 1, 2, 11 755 | ) 756 | 757 | self._check_base(cache_callable, param_to_change='b') 758 | self._check_cache_key(cache_callable, cache_key, a=1, b=2) 759 | 760 | # check partial invalidation 761 | self.cache.reset_mock() 762 | cache_callable(1, 2, 3) 763 | self.assertTrue(self.cache.called) 764 | 765 | self.cache.reset_mock() 766 | cache_callable(1, 2, 3) 767 | self.assertFalse(self.cache.called) 768 | 769 | self.cache.reset_mock() 770 | cache_callable.invalidate_cache_by_tags(c=3) 771 | cache_callable(1, 2, 3) 772 | self.assertTrue(self.cache.called) 773 | 774 | self.cache.reset_mock() 775 | cache_callable.invalidate_cache_by_prefix(b=2) 776 | cache_callable(1, 2, 3) 777 | self.assertTrue(self.cache.called) 778 | 779 | self.cache.reset_mock() 780 | cache_callable.invalidate_cache_by_key(1, b=2, c=3) 781 | cache_callable(1, 2, 3) 782 | self.assertTrue(self.cache.called) 783 | 784 | def test_ordinal_func(self): 785 | cache_callable = ordinal_func 786 | cache_prefix = ordinal_func.prefix 787 | cache_key = create_cache_key(cache_prefix, 10, 20) 788 | 789 | self.cache.reset_mock() 790 | 791 | result = process_args(a=10, b=10) 792 | 793 | self.assertEqual(cache_callable(a=10, b=10), result) 794 | self.cache.assert_called_once_with(result) 795 | self.cache.reset_mock() 796 | 797 | # cached version 798 | self.assertEqual(cache_callable(a=10, 
b=10), result) 799 | self.assertFalse(self.cache.called) 800 | self.cache.reset_mock() 801 | 802 | result = process_args(a=10, b=22) 803 | 804 | # different params, no cache 805 | self.assertEqual(cache_callable(a=10, b=22), result) 806 | self.cache.assert_called_once_with(result) 807 | self.cache.reset_mock() 808 | 809 | self._check_cache_key(cache_callable, cache_key, a=10, b=20) 810 | self._check_cache_prefix(cache_callable, cache_prefix, a=10, b=20) 811 | 812 | def test_second_func(self): 813 | cache_callable = second_func 814 | cache_key = create_cache_key('second', 100) 815 | 816 | self._check_base(cache_callable, param_to_change='c') 817 | self._check_cache_key(cache_callable, cache_key, 1, 2, c=100) 818 | self._check_timeout(cache_key, 450) 819 | self._check_tags(cache_callable, ['yyy'], 'yyy', 111) 820 | 821 | def test_invalidators(self): 822 | a, b = u'a', u'b' 823 | cache_callable = ordinal_func 824 | cache_prefix = ordinal_func.prefix 825 | cache_key = create_cache_key(cache_prefix, a, b) 826 | 827 | self.cache.reset_mock() 828 | 829 | result = process_args(a=a, b=b) 830 | 831 | self.assertEqual(cache_callable(a=a, b=b), result) 832 | self.cache.assert_called_once_with(result) 833 | self.cache.reset_mock() 834 | 835 | # cached version 836 | self.assertEqual(cache_callable(a=a, b=b), result) 837 | self.assertFalse(self.cache.called) 838 | self.cache.reset_mock() 839 | 840 | # invalidate cache via cache key 841 | invalidate_cache_key(cache_key) 842 | self.assertEqual(cache_callable(a=a, b=b), result) 843 | self.cache.assert_called_once_with(result) 844 | self.cache.reset_mock() 845 | 846 | # cached version 847 | self.assertEqual(cache_callable(a=a, b=b), result) 848 | self.assertFalse(self.cache.called) 849 | self.cache.reset_mock() 850 | 851 | # invalidate cache via prefix 852 | invalidate_cache_prefix(cache_prefix) 853 | self.assertEqual(cache_callable(a=a, b=b), result) 854 | self.cache.assert_called_once_with(result) 855 | self.cache.reset_mock() 856 | 857 | # cached version 858 | self.assertEqual(cache_callable(a=a, b=b), result) 859 | self.assertFalse(self.cache.called) 860 | self.cache.reset_mock() 861 | 862 | # invalidate cache via attached invalidator 863 | cache_callable.invalidate_cache_by_key(a=a, b=b) 864 | self.assertEqual(cache_callable(a=a, b=b), result) 865 | self.cache.assert_called_once_with(result) 866 | self.cache.reset_mock() 867 | 868 | def test_instance_method_and_meta_accepted_decorator(self): 869 | cache_callable = self.user.instance_method_meta_test 870 | 871 | cache_key = create_cache_key(1, 2, 5) 872 | 873 | self._check_base(cache_callable) 874 | self._check_cache_key(cache_callable, cache_key, 1, 2, c=5) 875 | self._check_timeout(cache_key, DEFAULT_TIMEOUT) 876 | self.assertEqual(len(self.local_cache), 1) 877 | 878 | def test_instance_method_dynamic_timeout(self): 879 | cache_callable = self.user.instance_dynamic_timeout 880 | 881 | self._check_base(cache_callable) 882 | 883 | cache_key = create_cache_key('dyn_timeout', 2) 884 | self._check_cache_key(cache_callable, cache_key, 2, 3, 4) 885 | self._check_timeout(cache_key, 2 * 100) 886 | 887 | self.cache.reset_mock() 888 | 889 | cache_key = create_cache_key('dyn_timeout', 4) 890 | self._check_cache_key(cache_callable, cache_key, 4, 5, 6) 891 | self._check_timeout(cache_key, 4 * 100) 892 | 893 | def test_refresh_cache(self): 894 | a, b = u'a', u'b' 895 | cache_callable = ordinal_func 896 | 897 | self.cache.reset_mock() 898 | 899 | result = process_args(a=a, b=b) 900 | 901 | 
self.assertEqual(cache_callable(a=a, b=b), result) 902 | self.cache.assert_called_once_with(result) 903 | self.cache.reset_mock() 904 | 905 | # cached version 906 | self.assertEqual(cache_callable(a=a, b=b), result) 907 | self.assertFalse(self.cache.called) 908 | self.cache.reset_mock() 909 | 910 | # refresh cache via cache key 911 | cache_callable.refresh_cache(a=a, b=b) 912 | self.cache.assert_called_once_with(result) 913 | self.assertEqual(cache_callable(a=a, b=b), result) 914 | self.cache.assert_called_once_with(result) 915 | self.cache.reset_mock() 916 | 917 | # Django-related part 918 | from django.conf import settings 919 | from django.test import SimpleTestCase 920 | from django.test.utils import override_settings 921 | 922 | settings.configure( 923 | DEBUG=True, 924 | DATABASES={ 925 | 'default': { 926 | 'ENGINE': 'django.db.backends.sqlite3', 927 | 'NAME': ':memory:' 928 | } 929 | }, 930 | ROOT_URLCONF='', 931 | INSTALLED_APPS=() 932 | ) 933 | 934 | 935 | @override_settings( 936 | CACHES={ 937 | 'default': { 938 | 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 939 | 'LOCATION': 'locmem', 940 | 'KEY_PREFIX': 'custom_prefix', 941 | } 942 | } 943 | ) 944 | class DjangoLocMemCacheTest(ClassCachedDecoratorTest, SimpleTestCase): 945 | """ Uses django LocMem cache """ 946 | 947 | def get_cache_instance(self): 948 | from django.core.cache import cache 949 | return CacheProxy(cache, DEBUG) 950 | 951 | 952 | @skipIf( 953 | (django.VERSION[0] > 3), 954 | 'This test should only be executed for Django >3.2' 955 | ) 956 | @override_settings( 957 | CACHES={ 958 | 'default': { 959 | 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 960 | 'LOCATION': MEMCACHED_HOST, 961 | 'KEY_PREFIX': 'memcached', 962 | } 963 | } 964 | ) 965 | class LiveMemcachedTest(DjangoLocMemCacheTest): 966 | """ Uses local memcached instance as cache backend """ 967 | 968 | 969 | @override_settings( 970 | CACHES={ 971 | 'default': { 972 | 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache', 973 | 'LOCATION': MEMCACHED_HOST, 974 | 'KEY_PREFIX': 'pylibmc', 975 | } 976 | } 977 | ) 978 | class LivePyLibMCTest(DjangoLocMemCacheTest): 979 | """ Uses local memcached instance as cache backend """ 980 | 981 | 982 | @skipIf( 983 | (django.VERSION[0] <= 3), 984 | 'This test should only be executed for Django version >= 3.2' 985 | ) 986 | @override_settings( 987 | CACHES={ 988 | 'default': { 989 | 'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache', 990 | 'LOCATION': MEMCACHED_HOST, 991 | 'KEY_PREFIX': 'pylibmc', 992 | } 993 | } 994 | ) 995 | class LivePyMemcachedTest(DjangoLocMemCacheTest): 996 | """ Uses local memcached instance as cache backend """ 997 | 998 | 999 | @override_settings( 1000 | CACHES={ 1001 | 'default': { 1002 | 'BACKEND': 'django_redis.cache.RedisCache', 1003 | 'LOCATION': 'redis://{}/1'.format(REDIS_HOST), 1004 | 'OPTIONS': { 1005 | 'CLIENT_CLASS': 'django_redis.client.DefaultClient', 1006 | } 1007 | } 1008 | } 1009 | ) 1010 | class LiveRedisTest(DjangoLocMemCacheTest): 1011 | """ Uses local redis instance and django-redis as cache backend """ 1012 | 1013 | 1014 | class MiscellaneousTest(TestCase): 1015 | def test_class_repr(self): 1016 | self.assertEqual( 1017 | repr(User.class_method_full_spec), 1018 | '' 1022 | ) 1023 | 1024 | self.assertEqual( 1025 | repr(User.class_method_default_cache_key), 1026 | '' 1029 | ) 1030 | 1031 | self.assertEqual( 1032 | repr(User.static_method), 1033 | '' 1037 | ) 1038 | 1039 | self.assertEqual( 1040 | 
repr(User.property_no_tags), 1041 | '' 1044 | ) 1045 | 1046 | self.assertEqual( 1047 | repr(User.instance_method_custom_tags), 1048 | '' 1052 | ) 1053 | 1054 | self.assertEqual( 1055 | repr(ordinal_func), 1056 | '' 1060 | ) 1061 | -------------------------------------------------------------------------------- /tests/tests_cache_clients.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import redis 3 | from django.test.utils import override_settings 4 | 5 | from easy_cache.contrib.redis_cache import RedisCacheInstance 6 | from easy_cache import caches 7 | 8 | from tests.conf import REDIS_HOST 9 | from tests.tests_basic import CacheProxy, DjangoLocMemCacheTest as Base, DEBUG 10 | 11 | 12 | class RedisCacheProxy(CacheProxy): 13 | @property 14 | def is_redis(self): 15 | return True 16 | 17 | def clear(self): 18 | self._cache.client.flushall() 19 | 20 | def __contains__(self, item): 21 | return self._cache.client.exists(item) 22 | 23 | 24 | @override_settings( 25 | CACHES={ 26 | 'default': { 27 | 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', 28 | } 29 | } 30 | ) 31 | class RedisCacheInstanceTest(Base): 32 | 33 | def get_cache_instance(self): 34 | host, port = REDIS_HOST.split(':') 35 | cache = RedisCacheInstance(redis.StrictRedis(host=host, port=port)) 36 | caches.set_default(cache) 37 | proxy = RedisCacheProxy(cache, DEBUG) 38 | return proxy 39 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | {py37}-django20 4 | {py38,py39,py310}-{django34} 5 | {py310}-{django4} 6 | tox_pyenv_fallback=False 7 | 8 | [pytest] 9 | testpaths = tests 10 | python_files = tests_* 11 | addopts = -s 12 | 13 | [testenv] 14 | passenv=EASY_CACHE_REDIS_HOST,EASY_CACHE_MEMCACHED_HOST 15 | commands= 16 | pytest tests/tests_basic.py 17 | pytest tests/tests_cache_clients.py 18 | python tests/benchmarks.py 19 | setenv = 20 | EASY_CACHE_LAZY_MODE_ENABLE = yes 21 | PYTHONPATH = {toxinidir} 22 | deps = 23 | django20: Django>=2.0.0 24 | django20: python-memcached 25 | django34: Django>=3 26 | django34: pymemcache 27 | django4: Django>=4 28 | django4: pymemcache 29 | pytest 30 | django-redis 31 | memory-profiler 32 | mock 33 | psutil 34 | redis 35 | pylibmc 36 | -------------------------------------------------------------------------------- /versioneer.py: -------------------------------------------------------------------------------- 1 | 2 | # Version: 0.17 3 | 4 | """The Versioneer - like a rocketeer, but for versions. 5 | 6 | The Versioneer 7 | ============== 8 | 9 | * like a rocketeer, but for versions! 10 | * https://github.com/warner/python-versioneer 11 | * Brian Warner 12 | * License: Public Domain 13 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, and pypy 14 | * [![Latest Version] 15 | (https://pypip.in/version/versioneer/badge.svg?style=flat) 16 | ](https://pypi.python.org/pypi/versioneer/) 17 | * [![Build Status] 18 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) 19 | ](https://travis-ci.org/warner/python-versioneer) 20 | 21 | This is a tool for managing a recorded version number in distutils-based 22 | python projects. The goal is to remove the tedious and error-prone "update 23 | the embedded version string" step from your release process. 
Making a new 24 | release should be as easy as recording a new tag in your version-control 25 | system, and maybe making new tarballs. 26 | 27 | 28 | ## Quick Install 29 | 30 | * `pip install versioneer` to somewhere to your $PATH 31 | * add a `[versioneer]` section to your setup.cfg (see below) 32 | * run `versioneer install` in your source tree, commit the results 33 | 34 | ## Version Identifiers 35 | 36 | Source trees come from a variety of places: 37 | 38 | * a version-control system checkout (mostly used by developers) 39 | * a nightly tarball, produced by build automation 40 | * a snapshot tarball, produced by a web-based VCS browser, like github's 41 | "tarball from tag" feature 42 | * a release tarball, produced by "setup.py sdist", distributed through PyPI 43 | 44 | Within each source tree, the version identifier (either a string or a number, 45 | this tool is format-agnostic) can come from a variety of places: 46 | 47 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows 48 | about recent "tags" and an absolute revision-id 49 | * the name of the directory into which the tarball was unpacked 50 | * an expanded VCS keyword ($Id$, etc) 51 | * a `_version.py` created by some earlier build step 52 | 53 | For released software, the version identifier is closely related to a VCS 54 | tag. Some projects use tag names that include more than just the version 55 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool 56 | needs to strip the tag prefix to extract the version identifier. For 57 | unreleased software (between tags), the version identifier should provide 58 | enough information to help developers recreate the same tree, while also 59 | giving them an idea of roughly how old the tree is (after version 1.2, before 60 | version 1.3). Many VCS systems can report a description that captures this, 61 | for example `git describe --tags --dirty --always` reports things like 62 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 63 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has 64 | uncommitted changes. 65 | 66 | The version identifier is used for multiple purposes: 67 | 68 | * to allow the module to self-identify its version: `myproject.__version__` 69 | * to choose a name and prefix for a 'setup.py sdist' tarball 70 | 71 | ## Theory of Operation 72 | 73 | Versioneer works by adding a special `_version.py` file into your source 74 | tree, where your `__init__.py` can import it. This `_version.py` knows how to 75 | dynamically ask the VCS tool for version information at import time. 76 | 77 | `_version.py` also contains `$Revision$` markers, and the installation 78 | process marks `_version.py` to have this marker rewritten with a tag name 79 | during the `git archive` command. As a result, generated tarballs will 80 | contain enough information to get the proper version. 81 | 82 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to 83 | the top level of your source tree, next to `setup.py` and the `setup.cfg` 84 | that configures it. This overrides several distutils/setuptools commands to 85 | compute the version when invoked, and changes `setup.py build` and `setup.py 86 | sdist` to replace `_version.py` with a small static file that contains just 87 | the generated version data. 88 | 89 | ## Installation 90 | 91 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 
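For orientation, the `[versioneer]` section mentioned in the Quick Install steps
uses the keys that `get_config_from_root()` reads further down in this file. The
values below are purely illustrative placeholders, not any particular project's
actual settings:

    [versioneer]
    VCS = git
    style = pep440
    versionfile_source = myproject/_version.py
    versionfile_build = myproject/_version.py
    tag_prefix =
    parentdir_prefix = myproject-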
92 | 93 | ## Version-String Flavors 94 | 95 | Code which uses Versioneer can learn about its version string at runtime by 96 | importing `_version` from your main `__init__.py` file and running the 97 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can 98 | import the top-level `versioneer.py` and run `get_versions()`. 99 | 100 | Both functions return a dictionary with different flavors of version 101 | information: 102 | 103 | * `['version']`: A condensed version string, rendered using the selected 104 | style. This is the most commonly used value for the project's version 105 | string. The default "pep440" style yields strings like `0.11`, 106 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section 107 | below for alternative styles. 108 | 109 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the 110 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". 111 | 112 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the 113 | commit date in ISO 8601 format. This will be None if the date is not 114 | available. 115 | 116 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that 117 | this is only accurate if run in a VCS checkout, otherwise it is likely to 118 | be False or None 119 | 120 | * `['error']`: if the version string could not be computed, this will be set 121 | to a string describing the problem, otherwise it will be None. It may be 122 | useful to throw an exception in setup.py if this is set, to avoid e.g. 123 | creating tarballs with a version string of "unknown". 124 | 125 | Some variants are more useful than others. Including `full-revisionid` in a 126 | bug report should allow developers to reconstruct the exact code being tested 127 | (or indicate the presence of local changes that should be shared with the 128 | developers). `version` is suitable for display in an "about" box or a CLI 129 | `--version` output: it can be easily compared against release notes and lists 130 | of bugs fixed in various releases. 131 | 132 | The installer adds the following text to your `__init__.py` to place a basic 133 | version in `YOURPROJECT.__version__`: 134 | 135 | from ._version import get_versions 136 | __version__ = get_versions()['version'] 137 | del get_versions 138 | 139 | ## Styles 140 | 141 | The setup.cfg `style=` configuration controls how the VCS information is 142 | rendered into a version string. 143 | 144 | The default style, "pep440", produces a PEP440-compliant string, equal to the 145 | un-prefixed tag name for actual releases, and containing an additional "local 146 | version" section with more detail for in-between builds. For Git, this is 147 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags 148 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the 149 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and 150 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released 151 | software (exactly equal to a known tag), the identifier will only contain the 152 | stripped tag, e.g. "0.11". 153 | 154 | Other styles are available. See details.md in the Versioneer source tree for 155 | descriptions. 156 | 157 | ## Debugging 158 | 159 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend 160 | to return a version of "0+unknown". 
To investigate the problem, run `setup.py 161 | version`, which will run the version-lookup code in a verbose mode, and will 162 | display the full contents of `get_versions()` (including the `error` string, 163 | which may help identify what went wrong). 164 | 165 | ## Known Limitations 166 | 167 | Some situations are known to cause problems for Versioneer. This details the 168 | most significant ones. More can be found on Github 169 | [issues page](https://github.com/warner/python-versioneer/issues). 170 | 171 | ### Subprojects 172 | 173 | Versioneer has limited support for source trees in which `setup.py` is not in 174 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are 175 | two common reasons why `setup.py` might not be in the root: 176 | 177 | * Source trees which contain multiple subprojects, such as 178 | [Buildbot](https://github.com/buildbot/buildbot), which contains both 179 | "master" and "slave" subprojects, each with their own `setup.py`, 180 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI 181 | distributions (and upload multiple independently-installable tarballs). 182 | * Source trees whose main purpose is to contain a C library, but which also 183 | provide bindings to Python (and perhaps other langauges) in subdirectories. 184 | 185 | Versioneer will look for `.git` in parent directories, and most operations 186 | should get the right version string. However `pip` and `setuptools` have bugs 187 | and implementation details which frequently cause `pip install .` from a 188 | subproject directory to fail to find a correct version string (so it usually 189 | defaults to `0+unknown`). 190 | 191 | `pip install --editable .` should work correctly. `setup.py install` might 192 | work too. 193 | 194 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in 195 | some later version. 196 | 197 | [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking 198 | this issue. The discussion in 199 | [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the 200 | issue from the Versioneer side in more detail. 201 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and 202 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve 203 | pip to let Versioneer work correctly. 204 | 205 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the 206 | `setup.cfg`, so subprojects were completely unsupported with those releases. 207 | 208 | ### Editable installs with setuptools <= 18.5 209 | 210 | `setup.py develop` and `pip install --editable .` allow you to install a 211 | project into a virtualenv once, then continue editing the source code (and 212 | test) without re-installing after every change. 213 | 214 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a 215 | convenient way to specify executable scripts that should be installed along 216 | with the python package. 217 | 218 | These both work as expected when using modern setuptools. When using 219 | setuptools-18.5 or earlier, however, certain operations will cause 220 | `pkg_resources.DistributionNotFound` errors when running the entrypoint 221 | script, which must be resolved by re-installing the package. This happens 222 | when the install happens with one version, then the egg_info data is 223 | regenerated while a different version is checked out. 
Many setup.py commands 224 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into 225 | a different virtualenv), so this can be surprising. 226 | 227 | [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes 228 | this one, but upgrading to a newer version of setuptools should probably 229 | resolve it. 230 | 231 | ### Unicode version strings 232 | 233 | While Versioneer works (and is continually tested) with both Python 2 and 234 | Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. 235 | Newer releases probably generate unicode version strings on py2. It's not 236 | clear that this is wrong, but it may be surprising for applications when then 237 | write these strings to a network connection or include them in bytes-oriented 238 | APIs like cryptographic checksums. 239 | 240 | [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates 241 | this question. 242 | 243 | 244 | ## Updating Versioneer 245 | 246 | To upgrade your project to a new release of Versioneer, do the following: 247 | 248 | * install the new Versioneer (`pip install -U versioneer` or equivalent) 249 | * edit `setup.cfg`, if necessary, to include any new configuration settings 250 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 251 | * re-run `versioneer install` in your source tree, to replace 252 | `SRC/_version.py` 253 | * commit any changed files 254 | 255 | ## Future Directions 256 | 257 | This tool is designed to make it easily extended to other version-control 258 | systems: all VCS-specific components are in separate directories like 259 | src/git/ . The top-level `versioneer.py` script is assembled from these 260 | components by running make-versioneer.py . In the future, make-versioneer.py 261 | will take a VCS name as an argument, and will construct a version of 262 | `versioneer.py` that is specific to the given VCS. It might also take the 263 | configuration arguments that are currently provided manually during 264 | installation by editing setup.py . Alternatively, it might go the other 265 | direction and include code from all supported VCS systems, reducing the 266 | number of intermediate scripts. 267 | 268 | 269 | ## License 270 | 271 | To make Versioneer easier to embed, all its code is dedicated to the public 272 | domain. The `_version.py` that it creates is also in the public domain. 273 | Specifically, both are released under the Creative Commons "Public Domain 274 | Dedication" license (CC0-1.0), as described in 275 | https://creativecommons.org/publicdomain/zero/1.0/ . 276 | 277 | """ 278 | 279 | from __future__ import print_function 280 | try: 281 | import configparser 282 | except ImportError: 283 | import ConfigParser as configparser 284 | import errno 285 | import json 286 | import os 287 | import re 288 | import subprocess 289 | import sys 290 | 291 | 292 | class VersioneerConfig: 293 | """Container for Versioneer configuration parameters.""" 294 | 295 | 296 | def get_root(): 297 | """Get the project root directory. 298 | 299 | We require that all commands are run from the project root, i.e. the 300 | directory that contains setup.py, setup.cfg, and versioneer.py . 
301 | """ 302 | root = os.path.realpath(os.path.abspath(os.getcwd())) 303 | setup_py = os.path.join(root, "setup.py") 304 | versioneer_py = os.path.join(root, "versioneer.py") 305 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 306 | # allow 'python path/to/setup.py COMMAND' 307 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) 308 | setup_py = os.path.join(root, "setup.py") 309 | versioneer_py = os.path.join(root, "versioneer.py") 310 | if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): 311 | err = ("Versioneer was unable to run the project root directory. " 312 | "Versioneer requires setup.py to be executed from " 313 | "its immediate directory (like 'python setup.py COMMAND'), " 314 | "or in a way that lets it use sys.argv[0] to find the root " 315 | "(like 'python path/to/setup.py COMMAND').") 316 | raise VersioneerBadRootError(err) 317 | try: 318 | # Certain runtime workflows (setup.py install/develop in a setuptools 319 | # tree) execute all dependencies in a single python process, so 320 | # "versioneer" may be imported multiple times, and python's shared 321 | # module-import table will cache the first one. So we can't use 322 | # os.path.dirname(__file__), as that will find whichever 323 | # versioneer.py was first imported, even in later projects. 324 | me = os.path.realpath(os.path.abspath(__file__)) 325 | me_dir = os.path.normcase(os.path.splitext(me)[0]) 326 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) 327 | if me_dir != vsr_dir: 328 | print("Warning: build in %s is using versioneer.py from %s" 329 | % (os.path.dirname(me), versioneer_py)) 330 | except NameError: 331 | pass 332 | return root 333 | 334 | 335 | def get_config_from_root(root): 336 | """Read the project setup.cfg file to determine Versioneer config.""" 337 | # This might raise EnvironmentError (if setup.cfg is missing), or 338 | # configparser.NoSectionError (if it lacks a [versioneer] section), or 339 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at 340 | # the top of versioneer.py for instructions on writing your setup.cfg . 
341 | setup_cfg = os.path.join(root, "setup.cfg") 342 | parser = configparser.SafeConfigParser() 343 | with open(setup_cfg, "r") as f: 344 | parser.readfp(f) 345 | VCS = parser.get("versioneer", "VCS") # mandatory 346 | 347 | def get(parser, name): 348 | if parser.has_option("versioneer", name): 349 | return parser.get("versioneer", name) 350 | return None 351 | cfg = VersioneerConfig() 352 | cfg.VCS = VCS 353 | cfg.style = get(parser, "style") or "" 354 | cfg.versionfile_source = get(parser, "versionfile_source") 355 | cfg.versionfile_build = get(parser, "versionfile_build") 356 | cfg.tag_prefix = get(parser, "tag_prefix") 357 | if cfg.tag_prefix in ("''", '""'): 358 | cfg.tag_prefix = "" 359 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") 360 | cfg.verbose = get(parser, "verbose") 361 | return cfg 362 | 363 | 364 | class NotThisMethod(Exception): 365 | """Exception raised if a method is not valid for the current scenario.""" 366 | 367 | # these dictionaries contain VCS-specific tools 368 | LONG_VERSION_PY = {} 369 | HANDLERS = {} 370 | 371 | 372 | def register_vcs_handler(vcs, method): # decorator 373 | """Decorator to mark a method as the handler for a particular VCS.""" 374 | def decorate(f): 375 | """Store f in HANDLERS[vcs][method].""" 376 | if vcs not in HANDLERS: 377 | HANDLERS[vcs] = {} 378 | HANDLERS[vcs][method] = f 379 | return f 380 | return decorate 381 | 382 | 383 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 384 | env=None): 385 | """Call the given command(s).""" 386 | assert isinstance(commands, list) 387 | p = None 388 | for c in commands: 389 | try: 390 | dispcmd = str([c] + args) 391 | # remember shell=False, so use git.cmd on windows, not just git 392 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 393 | stdout=subprocess.PIPE, 394 | stderr=(subprocess.PIPE if hide_stderr 395 | else None)) 396 | break 397 | except EnvironmentError: 398 | e = sys.exc_info()[1] 399 | if e.errno == errno.ENOENT: 400 | continue 401 | if verbose: 402 | print("unable to run %s" % dispcmd) 403 | print(e) 404 | return None, None 405 | else: 406 | if verbose: 407 | print("unable to find command, tried %s" % (commands,)) 408 | return None, None 409 | stdout = p.communicate()[0].strip() 410 | if sys.version_info[0] >= 3: 411 | stdout = stdout.decode() 412 | if p.returncode != 0: 413 | if verbose: 414 | print("unable to run %s (error)" % dispcmd) 415 | print("stdout was %s" % stdout) 416 | return None, p.returncode 417 | return stdout, p.returncode 418 | LONG_VERSION_PY['git'] = ''' 419 | # This file helps to compute a version number in source trees obtained from 420 | # git-archive tarball (such as those provided by githubs download-from-tag 421 | # feature). Distribution tarballs (built by setup.py sdist) and build 422 | # directories (produced by setup.py build) will contain a much shorter file 423 | # that just contains the computed version number. 424 | 425 | # This file is released into the public domain. Generated by 426 | # versioneer-0.17 (https://github.com/warner/python-versioneer) 427 | 428 | """Git implementation of _version.py.""" 429 | 430 | import errno 431 | import os 432 | import re 433 | import subprocess 434 | import sys 435 | 436 | 437 | def get_keywords(): 438 | """Get the keywords needed to look up the version information.""" 439 | # these strings will be replaced by git during git-archive. 440 | # setup.py/versioneer.py will grep for the variable names, so they must 441 | # each be defined on a line of their own. 
_version.py will just call 442 | # get_keywords(). 443 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" 444 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" 445 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" 446 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 447 | return keywords 448 | 449 | 450 | class VersioneerConfig: 451 | """Container for Versioneer configuration parameters.""" 452 | 453 | 454 | def get_config(): 455 | """Create, populate and return the VersioneerConfig() object.""" 456 | # these strings are filled in when 'setup.py versioneer' creates 457 | # _version.py 458 | cfg = VersioneerConfig() 459 | cfg.VCS = "git" 460 | cfg.style = "%(STYLE)s" 461 | cfg.tag_prefix = "%(TAG_PREFIX)s" 462 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" 463 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" 464 | cfg.verbose = False 465 | return cfg 466 | 467 | 468 | class NotThisMethod(Exception): 469 | """Exception raised if a method is not valid for the current scenario.""" 470 | 471 | 472 | LONG_VERSION_PY = {} 473 | HANDLERS = {} 474 | 475 | 476 | def register_vcs_handler(vcs, method): # decorator 477 | """Decorator to mark a method as the handler for a particular VCS.""" 478 | def decorate(f): 479 | """Store f in HANDLERS[vcs][method].""" 480 | if vcs not in HANDLERS: 481 | HANDLERS[vcs] = {} 482 | HANDLERS[vcs][method] = f 483 | return f 484 | return decorate 485 | 486 | 487 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 488 | env=None): 489 | """Call the given command(s).""" 490 | assert isinstance(commands, list) 491 | p = None 492 | for c in commands: 493 | try: 494 | dispcmd = str([c] + args) 495 | # remember shell=False, so use git.cmd on windows, not just git 496 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 497 | stdout=subprocess.PIPE, 498 | stderr=(subprocess.PIPE if hide_stderr 499 | else None)) 500 | break 501 | except EnvironmentError: 502 | e = sys.exc_info()[1] 503 | if e.errno == errno.ENOENT: 504 | continue 505 | if verbose: 506 | print("unable to run %%s" %% dispcmd) 507 | print(e) 508 | return None, None 509 | else: 510 | if verbose: 511 | print("unable to find command, tried %%s" %% (commands,)) 512 | return None, None 513 | stdout = p.communicate()[0].strip() 514 | if sys.version_info[0] >= 3: 515 | stdout = stdout.decode() 516 | if p.returncode != 0: 517 | if verbose: 518 | print("unable to run %%s (error)" %% dispcmd) 519 | print("stdout was %%s" %% stdout) 520 | return None, p.returncode 521 | return stdout, p.returncode 522 | 523 | 524 | def versions_from_parentdir(parentdir_prefix, root, verbose): 525 | """Try to determine the version from the parent directory name. 526 | 527 | Source tarballs conventionally unpack into a directory that includes both 528 | the project name and a version string. 
We will also support searching up 529 | two directory levels for an appropriately named parent directory 530 | """ 531 | rootdirs = [] 532 | 533 | for i in range(3): 534 | dirname = os.path.basename(root) 535 | if dirname.startswith(parentdir_prefix): 536 | return {"version": dirname[len(parentdir_prefix):], 537 | "full-revisionid": None, 538 | "dirty": False, "error": None, "date": None} 539 | else: 540 | rootdirs.append(root) 541 | root = os.path.dirname(root) # up a level 542 | 543 | if verbose: 544 | print("Tried directories %%s but none started with prefix %%s" %% 545 | (str(rootdirs), parentdir_prefix)) 546 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 547 | 548 | 549 | @register_vcs_handler("git", "get_keywords") 550 | def git_get_keywords(versionfile_abs): 551 | """Extract version information from the given file.""" 552 | # the code embedded in _version.py can just fetch the value of these 553 | # keywords. When used from setup.py, we don't want to import _version.py, 554 | # so we do it with a regexp instead. This function is not used from 555 | # _version.py. 556 | keywords = {} 557 | try: 558 | f = open(versionfile_abs, "r") 559 | for line in f.readlines(): 560 | if line.strip().startswith("git_refnames ="): 561 | mo = re.search(r'=\s*"(.*)"', line) 562 | if mo: 563 | keywords["refnames"] = mo.group(1) 564 | if line.strip().startswith("git_full ="): 565 | mo = re.search(r'=\s*"(.*)"', line) 566 | if mo: 567 | keywords["full"] = mo.group(1) 568 | if line.strip().startswith("git_date ="): 569 | mo = re.search(r'=\s*"(.*)"', line) 570 | if mo: 571 | keywords["date"] = mo.group(1) 572 | f.close() 573 | except EnvironmentError: 574 | pass 575 | return keywords 576 | 577 | 578 | @register_vcs_handler("git", "keywords") 579 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 580 | """Get version information from git keywords.""" 581 | if not keywords: 582 | raise NotThisMethod("no keywords at all, weird") 583 | date = keywords.get("date") 584 | if date is not None: 585 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant 586 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 587 | # -like" string, which we must then edit to make compliant), because 588 | # it's been around since git-1.5.3, and it's too difficult to 589 | # discover which version we're using, or to work around using an 590 | # older one. 591 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 592 | refnames = keywords["refnames"].strip() 593 | if refnames.startswith("$Format"): 594 | if verbose: 595 | print("keywords are unexpanded, not using") 596 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 597 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 598 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 599 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 600 | TAG = "tag: " 601 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 602 | if not tags: 603 | # Either we're using git < 1.8.3, or there really are no tags. We use 604 | # a heuristic: assume all version tags have a digit. The old git %%d 605 | # expansion behaves like git log --decorate=short and strips out the 606 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 607 | # between branches and tags. By ignoring refnames without digits, we 608 | # filter out many common branch names like "release" and 609 | # "stabilization", as well as "HEAD" and "master". 
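        # Illustrative example (hypothetical refs): a set like
        # {"HEAD", "master", "release", "1.2"} collapses to {"1.2"},
        # because only refnames containing a digit survive the filter below.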
610 | tags = set([r for r in refs if re.search(r'\d', r)]) 611 | if verbose: 612 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) 613 | if verbose: 614 | print("likely tags: %%s" %% ",".join(sorted(tags))) 615 | for ref in sorted(tags): 616 | # sorting will prefer e.g. "2.0" over "2.0rc1" 617 | if ref.startswith(tag_prefix): 618 | r = ref[len(tag_prefix):] 619 | if verbose: 620 | print("picking %%s" %% r) 621 | return {"version": r, 622 | "full-revisionid": keywords["full"].strip(), 623 | "dirty": False, "error": None, 624 | "date": date} 625 | # no suitable tags, so version is "0+unknown", but full hex is still there 626 | if verbose: 627 | print("no suitable tags, using unknown + full revision id") 628 | return {"version": "0+unknown", 629 | "full-revisionid": keywords["full"].strip(), 630 | "dirty": False, "error": "no suitable tags", "date": None} 631 | 632 | 633 | @register_vcs_handler("git", "pieces_from_vcs") 634 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 635 | """Get version from 'git describe' in the root of the source tree. 636 | 637 | This only gets called if the git-archive 'subst' keywords were *not* 638 | expanded, and _version.py hasn't already been rewritten with a short 639 | version string, meaning we're inside a checked out source tree. 640 | """ 641 | GITS = ["git"] 642 | if sys.platform == "win32": 643 | GITS = ["git.cmd", "git.exe"] 644 | 645 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 646 | hide_stderr=True) 647 | if rc != 0: 648 | if verbose: 649 | print("Directory %%s not under git control" %% root) 650 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 651 | 652 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 653 | # if there isn't one, this yields HEX[-dirty] (no NUM) 654 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 655 | "--always", "--long", 656 | "--match", "%%s*" %% tag_prefix], 657 | cwd=root) 658 | # --long was added in git-1.5.5 659 | if describe_out is None: 660 | raise NotThisMethod("'git describe' failed") 661 | describe_out = describe_out.strip() 662 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 663 | if full_out is None: 664 | raise NotThisMethod("'git rev-parse' failed") 665 | full_out = full_out.strip() 666 | 667 | pieces = {} 668 | pieces["long"] = full_out 669 | pieces["short"] = full_out[:7] # maybe improved later 670 | pieces["error"] = None 671 | 672 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 673 | # TAG might have hyphens. 674 | git_describe = describe_out 675 | 676 | # look for -dirty suffix 677 | dirty = git_describe.endswith("-dirty") 678 | pieces["dirty"] = dirty 679 | if dirty: 680 | git_describe = git_describe[:git_describe.rindex("-dirty")] 681 | 682 | # now we have TAG-NUM-gHEX or HEX 683 | 684 | if "-" in git_describe: 685 | # TAG-NUM-gHEX 686 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 687 | if not mo: 688 | # unparseable. Maybe git-describe is misbehaving? 
689 | pieces["error"] = ("unable to parse git-describe output: '%%s'" 690 | %% describe_out) 691 | return pieces 692 | 693 | # tag 694 | full_tag = mo.group(1) 695 | if not full_tag.startswith(tag_prefix): 696 | if verbose: 697 | fmt = "tag '%%s' doesn't start with prefix '%%s'" 698 | print(fmt %% (full_tag, tag_prefix)) 699 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" 700 | %% (full_tag, tag_prefix)) 701 | return pieces 702 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 703 | 704 | # distance: number of commits since tag 705 | pieces["distance"] = int(mo.group(2)) 706 | 707 | # commit: short hex revision ID 708 | pieces["short"] = mo.group(3) 709 | 710 | else: 711 | # HEX: no tags 712 | pieces["closest-tag"] = None 713 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 714 | cwd=root) 715 | pieces["distance"] = int(count_out) # total number of commits 716 | 717 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 718 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], 719 | cwd=root)[0].strip() 720 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 721 | 722 | return pieces 723 | 724 | 725 | def plus_or_dot(pieces): 726 | """Return a + if we don't already have one, else return a .""" 727 | if "+" in pieces.get("closest-tag", ""): 728 | return "." 729 | return "+" 730 | 731 | 732 | def render_pep440(pieces): 733 | """Build up version string, with post-release "local version identifier". 734 | 735 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 736 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 737 | 738 | Exceptions: 739 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 740 | """ 741 | if pieces["closest-tag"]: 742 | rendered = pieces["closest-tag"] 743 | if pieces["distance"] or pieces["dirty"]: 744 | rendered += plus_or_dot(pieces) 745 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) 746 | if pieces["dirty"]: 747 | rendered += ".dirty" 748 | else: 749 | # exception #1 750 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], 751 | pieces["short"]) 752 | if pieces["dirty"]: 753 | rendered += ".dirty" 754 | return rendered 755 | 756 | 757 | def render_pep440_pre(pieces): 758 | """TAG[.post.devDISTANCE] -- No -dirty. 759 | 760 | Exceptions: 761 | 1: no tags. 0.post.devDISTANCE 762 | """ 763 | if pieces["closest-tag"]: 764 | rendered = pieces["closest-tag"] 765 | if pieces["distance"]: 766 | rendered += ".post.dev%%d" %% pieces["distance"] 767 | else: 768 | # exception #1 769 | rendered = "0.post.dev%%d" %% pieces["distance"] 770 | return rendered 771 | 772 | 773 | def render_pep440_post(pieces): 774 | """TAG[.postDISTANCE[.dev0]+gHEX] . 775 | 776 | The ".dev0" means dirty. Note that .dev0 sorts backwards 777 | (a dirty tree will appear "older" than the corresponding clean one), 778 | but you shouldn't be releasing software with -dirty anyways. 779 | 780 | Exceptions: 781 | 1: no tags. 
0.postDISTANCE[.dev0] 782 | """ 783 | if pieces["closest-tag"]: 784 | rendered = pieces["closest-tag"] 785 | if pieces["distance"] or pieces["dirty"]: 786 | rendered += ".post%%d" %% pieces["distance"] 787 | if pieces["dirty"]: 788 | rendered += ".dev0" 789 | rendered += plus_or_dot(pieces) 790 | rendered += "g%%s" %% pieces["short"] 791 | else: 792 | # exception #1 793 | rendered = "0.post%%d" %% pieces["distance"] 794 | if pieces["dirty"]: 795 | rendered += ".dev0" 796 | rendered += "+g%%s" %% pieces["short"] 797 | return rendered 798 | 799 | 800 | def render_pep440_old(pieces): 801 | """TAG[.postDISTANCE[.dev0]] . 802 | 803 | The ".dev0" means dirty. 804 | 805 | Eexceptions: 806 | 1: no tags. 0.postDISTANCE[.dev0] 807 | """ 808 | if pieces["closest-tag"]: 809 | rendered = pieces["closest-tag"] 810 | if pieces["distance"] or pieces["dirty"]: 811 | rendered += ".post%%d" %% pieces["distance"] 812 | if pieces["dirty"]: 813 | rendered += ".dev0" 814 | else: 815 | # exception #1 816 | rendered = "0.post%%d" %% pieces["distance"] 817 | if pieces["dirty"]: 818 | rendered += ".dev0" 819 | return rendered 820 | 821 | 822 | def render_git_describe(pieces): 823 | """TAG[-DISTANCE-gHEX][-dirty]. 824 | 825 | Like 'git describe --tags --dirty --always'. 826 | 827 | Exceptions: 828 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 829 | """ 830 | if pieces["closest-tag"]: 831 | rendered = pieces["closest-tag"] 832 | if pieces["distance"]: 833 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 834 | else: 835 | # exception #1 836 | rendered = pieces["short"] 837 | if pieces["dirty"]: 838 | rendered += "-dirty" 839 | return rendered 840 | 841 | 842 | def render_git_describe_long(pieces): 843 | """TAG-DISTANCE-gHEX[-dirty]. 844 | 845 | Like 'git describe --tags --dirty --always -long'. 846 | The distance/hash is unconditional. 847 | 848 | Exceptions: 849 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 850 | """ 851 | if pieces["closest-tag"]: 852 | rendered = pieces["closest-tag"] 853 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) 854 | else: 855 | # exception #1 856 | rendered = pieces["short"] 857 | if pieces["dirty"]: 858 | rendered += "-dirty" 859 | return rendered 860 | 861 | 862 | def render(pieces, style): 863 | """Render the given version pieces into the requested style.""" 864 | if pieces["error"]: 865 | return {"version": "unknown", 866 | "full-revisionid": pieces.get("long"), 867 | "dirty": None, 868 | "error": pieces["error"], 869 | "date": None} 870 | 871 | if not style or style == "default": 872 | style = "pep440" # the default 873 | 874 | if style == "pep440": 875 | rendered = render_pep440(pieces) 876 | elif style == "pep440-pre": 877 | rendered = render_pep440_pre(pieces) 878 | elif style == "pep440-post": 879 | rendered = render_pep440_post(pieces) 880 | elif style == "pep440-old": 881 | rendered = render_pep440_old(pieces) 882 | elif style == "git-describe": 883 | rendered = render_git_describe(pieces) 884 | elif style == "git-describe-long": 885 | rendered = render_git_describe_long(pieces) 886 | else: 887 | raise ValueError("unknown style '%%s'" %% style) 888 | 889 | return {"version": rendered, "full-revisionid": pieces["long"], 890 | "dirty": pieces["dirty"], "error": None, 891 | "date": pieces.get("date")} 892 | 893 | 894 | def get_versions(): 895 | """Get version information or return default if unable to do so.""" 896 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. 
If we have 897 | # __file__, we can work backwards from there to the root. Some 898 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 899 | # case we can only use expanded keywords. 900 | 901 | cfg = get_config() 902 | verbose = cfg.verbose 903 | 904 | try: 905 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 906 | verbose) 907 | except NotThisMethod: 908 | pass 909 | 910 | try: 911 | root = os.path.realpath(__file__) 912 | # versionfile_source is the relative path from the top of the source 913 | # tree (where the .git directory might live) to this file. Invert 914 | # this to find the root from __file__. 915 | for i in cfg.versionfile_source.split('/'): 916 | root = os.path.dirname(root) 917 | except NameError: 918 | return {"version": "0+unknown", "full-revisionid": None, 919 | "dirty": None, 920 | "error": "unable to find root of source tree", 921 | "date": None} 922 | 923 | try: 924 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 925 | return render(pieces, cfg.style) 926 | except NotThisMethod: 927 | pass 928 | 929 | try: 930 | if cfg.parentdir_prefix: 931 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 932 | except NotThisMethod: 933 | pass 934 | 935 | return {"version": "0+unknown", "full-revisionid": None, 936 | "dirty": None, 937 | "error": "unable to compute version", "date": None} 938 | ''' 939 | 940 | 941 | @register_vcs_handler("git", "get_keywords") 942 | def git_get_keywords(versionfile_abs): 943 | """Extract version information from the given file.""" 944 | # the code embedded in _version.py can just fetch the value of these 945 | # keywords. When used from setup.py, we don't want to import _version.py, 946 | # so we do it with a regexp instead. This function is not used from 947 | # _version.py. 948 | keywords = {} 949 | try: 950 | f = open(versionfile_abs, "r") 951 | for line in f.readlines(): 952 | if line.strip().startswith("git_refnames ="): 953 | mo = re.search(r'=\s*"(.*)"', line) 954 | if mo: 955 | keywords["refnames"] = mo.group(1) 956 | if line.strip().startswith("git_full ="): 957 | mo = re.search(r'=\s*"(.*)"', line) 958 | if mo: 959 | keywords["full"] = mo.group(1) 960 | if line.strip().startswith("git_date ="): 961 | mo = re.search(r'=\s*"(.*)"', line) 962 | if mo: 963 | keywords["date"] = mo.group(1) 964 | f.close() 965 | except EnvironmentError: 966 | pass 967 | return keywords 968 | 969 | 970 | @register_vcs_handler("git", "keywords") 971 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 972 | """Get version information from git keywords.""" 973 | if not keywords: 974 | raise NotThisMethod("no keywords at all, weird") 975 | date = keywords.get("date") 976 | if date is not None: 977 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 978 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 979 | # -like" string, which we must then edit to make compliant), because 980 | # it's been around since git-1.5.3, and it's too difficult to 981 | # discover which version we're using, or to work around using an 982 | # older one. 
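        # For example, a raw "%ci" value such as "2017-03-01 12:34:56 +0100"
        # is rewritten below to the ISO-8601 form "2017-03-01T12:34:56+0100".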
983 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 984 | refnames = keywords["refnames"].strip() 985 | if refnames.startswith("$Format"): 986 | if verbose: 987 | print("keywords are unexpanded, not using") 988 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 989 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 990 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 991 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 992 | TAG = "tag: " 993 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 994 | if not tags: 995 | # Either we're using git < 1.8.3, or there really are no tags. We use 996 | # a heuristic: assume all version tags have a digit. The old git %d 997 | # expansion behaves like git log --decorate=short and strips out the 998 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 999 | # between branches and tags. By ignoring refnames without digits, we 1000 | # filter out many common branch names like "release" and 1001 | # "stabilization", as well as "HEAD" and "master". 1002 | tags = set([r for r in refs if re.search(r'\d', r)]) 1003 | if verbose: 1004 | print("discarding '%s', no digits" % ",".join(refs - tags)) 1005 | if verbose: 1006 | print("likely tags: %s" % ",".join(sorted(tags))) 1007 | for ref in sorted(tags): 1008 | # sorting will prefer e.g. "2.0" over "2.0rc1" 1009 | if ref.startswith(tag_prefix): 1010 | r = ref[len(tag_prefix):] 1011 | if verbose: 1012 | print("picking %s" % r) 1013 | return {"version": r, 1014 | "full-revisionid": keywords["full"].strip(), 1015 | "dirty": False, "error": None, 1016 | "date": date} 1017 | # no suitable tags, so version is "0+unknown", but full hex is still there 1018 | if verbose: 1019 | print("no suitable tags, using unknown + full revision id") 1020 | return {"version": "0+unknown", 1021 | "full-revisionid": keywords["full"].strip(), 1022 | "dirty": False, "error": "no suitable tags", "date": None} 1023 | 1024 | 1025 | @register_vcs_handler("git", "pieces_from_vcs") 1026 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 1027 | """Get version from 'git describe' in the root of the source tree. 1028 | 1029 | This only gets called if the git-archive 'subst' keywords were *not* 1030 | expanded, and _version.py hasn't already been rewritten with a short 1031 | version string, meaning we're inside a checked out source tree. 
1032 | """ 1033 | GITS = ["git"] 1034 | if sys.platform == "win32": 1035 | GITS = ["git.cmd", "git.exe"] 1036 | 1037 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 1038 | hide_stderr=True) 1039 | if rc != 0: 1040 | if verbose: 1041 | print("Directory %s not under git control" % root) 1042 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 1043 | 1044 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 1045 | # if there isn't one, this yields HEX[-dirty] (no NUM) 1046 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 1047 | "--always", "--long", 1048 | "--match", "%s*" % tag_prefix], 1049 | cwd=root) 1050 | # --long was added in git-1.5.5 1051 | if describe_out is None: 1052 | raise NotThisMethod("'git describe' failed") 1053 | describe_out = describe_out.strip() 1054 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 1055 | if full_out is None: 1056 | raise NotThisMethod("'git rev-parse' failed") 1057 | full_out = full_out.strip() 1058 | 1059 | pieces = {} 1060 | pieces["long"] = full_out 1061 | pieces["short"] = full_out[:7] # maybe improved later 1062 | pieces["error"] = None 1063 | 1064 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 1065 | # TAG might have hyphens. 1066 | git_describe = describe_out 1067 | 1068 | # look for -dirty suffix 1069 | dirty = git_describe.endswith("-dirty") 1070 | pieces["dirty"] = dirty 1071 | if dirty: 1072 | git_describe = git_describe[:git_describe.rindex("-dirty")] 1073 | 1074 | # now we have TAG-NUM-gHEX or HEX 1075 | 1076 | if "-" in git_describe: 1077 | # TAG-NUM-gHEX 1078 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 1079 | if not mo: 1080 | # unparseable. Maybe git-describe is misbehaving? 1081 | pieces["error"] = ("unable to parse git-describe output: '%s'" 1082 | % describe_out) 1083 | return pieces 1084 | 1085 | # tag 1086 | full_tag = mo.group(1) 1087 | if not full_tag.startswith(tag_prefix): 1088 | if verbose: 1089 | fmt = "tag '%s' doesn't start with prefix '%s'" 1090 | print(fmt % (full_tag, tag_prefix)) 1091 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 1092 | % (full_tag, tag_prefix)) 1093 | return pieces 1094 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 1095 | 1096 | # distance: number of commits since tag 1097 | pieces["distance"] = int(mo.group(2)) 1098 | 1099 | # commit: short hex revision ID 1100 | pieces["short"] = mo.group(3) 1101 | 1102 | else: 1103 | # HEX: no tags 1104 | pieces["closest-tag"] = None 1105 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 1106 | cwd=root) 1107 | pieces["distance"] = int(count_out) # total number of commits 1108 | 1109 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 1110 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], 1111 | cwd=root)[0].strip() 1112 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 1113 | 1114 | return pieces 1115 | 1116 | 1117 | def do_vcs_install(manifest_in, versionfile_source, ipy): 1118 | """Git-specific installation logic for Versioneer. 1119 | 1120 | For Git, this means creating/changing .gitattributes to mark _version.py 1121 | for export-subst keyword substitution. 
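    In practice the entry appended to .gitattributes has the form
    "<versionfile_source> export-subst", as produced by the f.write() call
    below.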
1122 | """ 1123 | GITS = ["git"] 1124 | if sys.platform == "win32": 1125 | GITS = ["git.cmd", "git.exe"] 1126 | files = [manifest_in, versionfile_source] 1127 | if ipy: 1128 | files.append(ipy) 1129 | try: 1130 | me = __file__ 1131 | if me.endswith(".pyc") or me.endswith(".pyo"): 1132 | me = os.path.splitext(me)[0] + ".py" 1133 | versioneer_file = os.path.relpath(me) 1134 | except NameError: 1135 | versioneer_file = "versioneer.py" 1136 | files.append(versioneer_file) 1137 | present = False 1138 | try: 1139 | f = open(".gitattributes", "r") 1140 | for line in f.readlines(): 1141 | if line.strip().startswith(versionfile_source): 1142 | if "export-subst" in line.strip().split()[1:]: 1143 | present = True 1144 | f.close() 1145 | except EnvironmentError: 1146 | pass 1147 | if not present: 1148 | f = open(".gitattributes", "a+") 1149 | f.write("%s export-subst\n" % versionfile_source) 1150 | f.close() 1151 | files.append(".gitattributes") 1152 | run_command(GITS, ["add", "--"] + files) 1153 | 1154 | 1155 | def versions_from_parentdir(parentdir_prefix, root, verbose): 1156 | """Try to determine the version from the parent directory name. 1157 | 1158 | Source tarballs conventionally unpack into a directory that includes both 1159 | the project name and a version string. We will also support searching up 1160 | two directory levels for an appropriately named parent directory 1161 | """ 1162 | rootdirs = [] 1163 | 1164 | for i in range(3): 1165 | dirname = os.path.basename(root) 1166 | if dirname.startswith(parentdir_prefix): 1167 | return {"version": dirname[len(parentdir_prefix):], 1168 | "full-revisionid": None, 1169 | "dirty": False, "error": None, "date": None} 1170 | else: 1171 | rootdirs.append(root) 1172 | root = os.path.dirname(root) # up a level 1173 | 1174 | if verbose: 1175 | print("Tried directories %s but none started with prefix %s" % 1176 | (str(rootdirs), parentdir_prefix)) 1177 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 1178 | 1179 | SHORT_VERSION_PY = """ 1180 | # This file was generated by 'versioneer.py' (0.17) from 1181 | # revision-control system data, or from the parent directory name of an 1182 | # unpacked source archive. Distribution tarballs contain a pre-generated copy 1183 | # of this file. 
1184 | 1185 | import json 1186 | 1187 | version_json = ''' 1188 | %s 1189 | ''' # END VERSION_JSON 1190 | 1191 | 1192 | def get_versions(): 1193 | return json.loads(version_json) 1194 | """ 1195 | 1196 | 1197 | def versions_from_file(filename): 1198 | """Try to determine the version from _version.py if present.""" 1199 | try: 1200 | with open(filename) as f: 1201 | contents = f.read() 1202 | except EnvironmentError: 1203 | raise NotThisMethod("unable to read _version.py") 1204 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", 1205 | contents, re.M | re.S) 1206 | if not mo: 1207 | mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", 1208 | contents, re.M | re.S) 1209 | if not mo: 1210 | raise NotThisMethod("no version_json in _version.py") 1211 | return json.loads(mo.group(1)) 1212 | 1213 | 1214 | def write_to_version_file(filename, versions): 1215 | """Write the given version number to the given _version.py file.""" 1216 | os.unlink(filename) 1217 | contents = json.dumps(versions, sort_keys=True, 1218 | indent=1, separators=(",", ": ")) 1219 | with open(filename, "w") as f: 1220 | f.write(SHORT_VERSION_PY % contents) 1221 | 1222 | print("set %s to '%s'" % (filename, versions["version"])) 1223 | 1224 | 1225 | def plus_or_dot(pieces): 1226 | """Return a + if we don't already have one, else return a .""" 1227 | if "+" in pieces.get("closest-tag", ""): 1228 | return "." 1229 | return "+" 1230 | 1231 | 1232 | def render_pep440(pieces): 1233 | """Build up version string, with post-release "local version identifier". 1234 | 1235 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 1236 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 1237 | 1238 | Exceptions: 1239 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 1240 | """ 1241 | if pieces["closest-tag"]: 1242 | rendered = pieces["closest-tag"] 1243 | if pieces["distance"] or pieces["dirty"]: 1244 | rendered += plus_or_dot(pieces) 1245 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 1246 | if pieces["dirty"]: 1247 | rendered += ".dirty" 1248 | else: 1249 | # exception #1 1250 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 1251 | pieces["short"]) 1252 | if pieces["dirty"]: 1253 | rendered += ".dirty" 1254 | return rendered 1255 | 1256 | 1257 | def render_pep440_pre(pieces): 1258 | """TAG[.post.devDISTANCE] -- No -dirty. 1259 | 1260 | Exceptions: 1261 | 1: no tags. 0.post.devDISTANCE 1262 | """ 1263 | if pieces["closest-tag"]: 1264 | rendered = pieces["closest-tag"] 1265 | if pieces["distance"]: 1266 | rendered += ".post.dev%d" % pieces["distance"] 1267 | else: 1268 | # exception #1 1269 | rendered = "0.post.dev%d" % pieces["distance"] 1270 | return rendered 1271 | 1272 | 1273 | def render_pep440_post(pieces): 1274 | """TAG[.postDISTANCE[.dev0]+gHEX] . 1275 | 1276 | The ".dev0" means dirty. Note that .dev0 sorts backwards 1277 | (a dirty tree will appear "older" than the corresponding clean one), 1278 | but you shouldn't be releasing software with -dirty anyways. 1279 | 1280 | Exceptions: 1281 | 1: no tags. 
0.postDISTANCE[.dev0] 1282 | """ 1283 | if pieces["closest-tag"]: 1284 | rendered = pieces["closest-tag"] 1285 | if pieces["distance"] or pieces["dirty"]: 1286 | rendered += ".post%d" % pieces["distance"] 1287 | if pieces["dirty"]: 1288 | rendered += ".dev0" 1289 | rendered += plus_or_dot(pieces) 1290 | rendered += "g%s" % pieces["short"] 1291 | else: 1292 | # exception #1 1293 | rendered = "0.post%d" % pieces["distance"] 1294 | if pieces["dirty"]: 1295 | rendered += ".dev0" 1296 | rendered += "+g%s" % pieces["short"] 1297 | return rendered 1298 | 1299 | 1300 | def render_pep440_old(pieces): 1301 | """TAG[.postDISTANCE[.dev0]] . 1302 | 1303 | The ".dev0" means dirty. 1304 | 1305 | Eexceptions: 1306 | 1: no tags. 0.postDISTANCE[.dev0] 1307 | """ 1308 | if pieces["closest-tag"]: 1309 | rendered = pieces["closest-tag"] 1310 | if pieces["distance"] or pieces["dirty"]: 1311 | rendered += ".post%d" % pieces["distance"] 1312 | if pieces["dirty"]: 1313 | rendered += ".dev0" 1314 | else: 1315 | # exception #1 1316 | rendered = "0.post%d" % pieces["distance"] 1317 | if pieces["dirty"]: 1318 | rendered += ".dev0" 1319 | return rendered 1320 | 1321 | 1322 | def render_git_describe(pieces): 1323 | """TAG[-DISTANCE-gHEX][-dirty]. 1324 | 1325 | Like 'git describe --tags --dirty --always'. 1326 | 1327 | Exceptions: 1328 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 1329 | """ 1330 | if pieces["closest-tag"]: 1331 | rendered = pieces["closest-tag"] 1332 | if pieces["distance"]: 1333 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1334 | else: 1335 | # exception #1 1336 | rendered = pieces["short"] 1337 | if pieces["dirty"]: 1338 | rendered += "-dirty" 1339 | return rendered 1340 | 1341 | 1342 | def render_git_describe_long(pieces): 1343 | """TAG-DISTANCE-gHEX[-dirty]. 1344 | 1345 | Like 'git describe --tags --dirty --always -long'. 1346 | The distance/hash is unconditional. 1347 | 1348 | Exceptions: 1349 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 1350 | """ 1351 | if pieces["closest-tag"]: 1352 | rendered = pieces["closest-tag"] 1353 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 1354 | else: 1355 | # exception #1 1356 | rendered = pieces["short"] 1357 | if pieces["dirty"]: 1358 | rendered += "-dirty" 1359 | return rendered 1360 | 1361 | 1362 | def render(pieces, style): 1363 | """Render the given version pieces into the requested style.""" 1364 | if pieces["error"]: 1365 | return {"version": "unknown", 1366 | "full-revisionid": pieces.get("long"), 1367 | "dirty": None, 1368 | "error": pieces["error"], 1369 | "date": None} 1370 | 1371 | if not style or style == "default": 1372 | style = "pep440" # the default 1373 | 1374 | if style == "pep440": 1375 | rendered = render_pep440(pieces) 1376 | elif style == "pep440-pre": 1377 | rendered = render_pep440_pre(pieces) 1378 | elif style == "pep440-post": 1379 | rendered = render_pep440_post(pieces) 1380 | elif style == "pep440-old": 1381 | rendered = render_pep440_old(pieces) 1382 | elif style == "git-describe": 1383 | rendered = render_git_describe(pieces) 1384 | elif style == "git-describe-long": 1385 | rendered = render_git_describe_long(pieces) 1386 | else: 1387 | raise ValueError("unknown style '%s'" % style) 1388 | 1389 | return {"version": rendered, "full-revisionid": pieces["long"], 1390 | "dirty": pieces["dirty"], "error": None, 1391 | "date": pieces.get("date")} 1392 | 1393 | 1394 | class VersioneerBadRootError(Exception): 1395 | """The project root directory is unknown or missing key files.""" 1396 | 1397 | 1398 | def get_versions(verbose=False): 1399 | """Get the project version from whatever source is available. 1400 | 1401 | Returns dict with two keys: 'version' and 'full'. 1402 | """ 1403 | if "versioneer" in sys.modules: 1404 | # see the discussion in cmdclass.py:get_cmdclass() 1405 | del sys.modules["versioneer"] 1406 | 1407 | root = get_root() 1408 | cfg = get_config_from_root(root) 1409 | 1410 | assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" 1411 | handlers = HANDLERS.get(cfg.VCS) 1412 | assert handlers, "unrecognized VCS '%s'" % cfg.VCS 1413 | verbose = verbose or cfg.verbose 1414 | assert cfg.versionfile_source is not None, \ 1415 | "please set versioneer.versionfile_source" 1416 | assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" 1417 | 1418 | versionfile_abs = os.path.join(root, cfg.versionfile_source) 1419 | 1420 | # extract version from first of: _version.py, VCS command (e.g. 'git 1421 | # describe'), parentdir. This is meant to work for developers using a 1422 | # source checkout, for users of a tarball created by 'setup.py sdist', 1423 | # and for users of a tarball/zipball created by 'git archive' or github's 1424 | # download-from-tag feature or the equivalent in other VCSes. 
1425 | 1426 | get_keywords_f = handlers.get("get_keywords") 1427 | from_keywords_f = handlers.get("keywords") 1428 | if get_keywords_f and from_keywords_f: 1429 | try: 1430 | keywords = get_keywords_f(versionfile_abs) 1431 | ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) 1432 | if verbose: 1433 | print("got version from expanded keyword %s" % ver) 1434 | return ver 1435 | except NotThisMethod: 1436 | pass 1437 | 1438 | try: 1439 | ver = versions_from_file(versionfile_abs) 1440 | if verbose: 1441 | print("got version from file %s %s" % (versionfile_abs, ver)) 1442 | return ver 1443 | except NotThisMethod: 1444 | pass 1445 | 1446 | from_vcs_f = handlers.get("pieces_from_vcs") 1447 | if from_vcs_f: 1448 | try: 1449 | pieces = from_vcs_f(cfg.tag_prefix, root, verbose) 1450 | ver = render(pieces, cfg.style) 1451 | if verbose: 1452 | print("got version from VCS %s" % ver) 1453 | return ver 1454 | except NotThisMethod: 1455 | pass 1456 | 1457 | try: 1458 | if cfg.parentdir_prefix: 1459 | ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 1460 | if verbose: 1461 | print("got version from parentdir %s" % ver) 1462 | return ver 1463 | except NotThisMethod: 1464 | pass 1465 | 1466 | if verbose: 1467 | print("unable to compute version") 1468 | 1469 | return {"version": "0+unknown", "full-revisionid": None, 1470 | "dirty": None, "error": "unable to compute version", 1471 | "date": None} 1472 | 1473 | 1474 | def get_version(): 1475 | """Get the short version string for this project.""" 1476 | return get_versions()["version"] 1477 | 1478 | 1479 | def get_cmdclass(): 1480 | """Get the custom setuptools/distutils subclasses used by Versioneer.""" 1481 | if "versioneer" in sys.modules: 1482 | del sys.modules["versioneer"] 1483 | # this fixes the "python setup.py develop" case (also 'install' and 1484 | # 'easy_install .'), in which subdependencies of the main project are 1485 | # built (using setup.py bdist_egg) in the same python process. Assume 1486 | # a main project A and a dependency B, which use different versions 1487 | # of Versioneer. A's setup.py imports A's Versioneer, leaving it in 1488 | # sys.modules by the time B's setup.py is executed, causing B to run 1489 | # with the wrong versioneer. Setuptools wraps the sub-dep builds in a 1490 | # sandbox that restores sys.modules to its pre-build state, so the 1491 | # parent is protected against the child's "import versioneer". By 1492 | # removing ourselves from sys.modules here, before the child build 1493 | # happens, we protect the child from the parent's versioneer too.
1494 | # Also see https://github.com/warner/python-versioneer/issues/52 1495 | 1496 | cmds = {} 1497 | 1498 | # we add "version" to both distutils and setuptools 1499 | from distutils.core import Command 1500 | 1501 | class cmd_version(Command): 1502 | description = "report generated version string" 1503 | user_options = [] 1504 | boolean_options = [] 1505 | 1506 | def initialize_options(self): 1507 | pass 1508 | 1509 | def finalize_options(self): 1510 | pass 1511 | 1512 | def run(self): 1513 | vers = get_versions(verbose=True) 1514 | print("Version: %s" % vers["version"]) 1515 | print(" full-revisionid: %s" % vers.get("full-revisionid")) 1516 | print(" dirty: %s" % vers.get("dirty")) 1517 | print(" date: %s" % vers.get("date")) 1518 | if vers["error"]: 1519 | print(" error: %s" % vers["error"]) 1520 | cmds["version"] = cmd_version 1521 | 1522 | # we override "build_py" in both distutils and setuptools 1523 | # 1524 | # most invocation pathways end up running build_py: 1525 | # distutils/build -> build_py 1526 | # distutils/install -> distutils/build ->.. 1527 | # setuptools/bdist_wheel -> distutils/install ->.. 1528 | # setuptools/bdist_egg -> distutils/install_lib -> build_py 1529 | # setuptools/install -> bdist_egg ->.. 1530 | # setuptools/develop -> ? 1531 | # pip install: 1532 | # copies source tree to a tempdir before running egg_info/etc 1533 | # if .git isn't copied too, 'git describe' will fail 1534 | # then does setup.py bdist_wheel, or sometimes setup.py install 1535 | # setup.py egg_info -> ? 1536 | 1537 | # we override different "build_py" commands for both environments 1538 | if "setuptools" in sys.modules: 1539 | from setuptools.command.build_py import build_py as _build_py 1540 | else: 1541 | from distutils.command.build_py import build_py as _build_py 1542 | 1543 | class cmd_build_py(_build_py): 1544 | def run(self): 1545 | root = get_root() 1546 | cfg = get_config_from_root(root) 1547 | versions = get_versions() 1548 | _build_py.run(self) 1549 | # now locate _version.py in the new build/ directory and replace 1550 | # it with an updated value 1551 | if cfg.versionfile_build: 1552 | target_versionfile = os.path.join(self.build_lib, 1553 | cfg.versionfile_build) 1554 | print("UPDATING %s" % target_versionfile) 1555 | write_to_version_file(target_versionfile, versions) 1556 | cmds["build_py"] = cmd_build_py 1557 | 1558 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? 1559 | from cx_Freeze.dist import build_exe as _build_exe 1560 | # nczeczulin reports that py2exe won't like the pep440-style string 1561 | # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 1562 | # setup(console=[{ 1563 | # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION 1564 | # "product_version": versioneer.get_version(), 1565 | # ... 
1566 | 1567 | class cmd_build_exe(_build_exe): 1568 | def run(self): 1569 | root = get_root() 1570 | cfg = get_config_from_root(root) 1571 | versions = get_versions() 1572 | target_versionfile = cfg.versionfile_source 1573 | print("UPDATING %s" % target_versionfile) 1574 | write_to_version_file(target_versionfile, versions) 1575 | 1576 | _build_exe.run(self) 1577 | os.unlink(target_versionfile) 1578 | with open(cfg.versionfile_source, "w") as f: 1579 | LONG = LONG_VERSION_PY[cfg.VCS] 1580 | f.write(LONG % 1581 | {"DOLLAR": "$", 1582 | "STYLE": cfg.style, 1583 | "TAG_PREFIX": cfg.tag_prefix, 1584 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1585 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1586 | }) 1587 | cmds["build_exe"] = cmd_build_exe 1588 | del cmds["build_py"] 1589 | 1590 | if 'py2exe' in sys.modules: # py2exe enabled? 1591 | try: 1592 | from py2exe.distutils_buildexe import py2exe as _py2exe # py3 1593 | except ImportError: 1594 | from py2exe.build_exe import py2exe as _py2exe # py2 1595 | 1596 | class cmd_py2exe(_py2exe): 1597 | def run(self): 1598 | root = get_root() 1599 | cfg = get_config_from_root(root) 1600 | versions = get_versions() 1601 | target_versionfile = cfg.versionfile_source 1602 | print("UPDATING %s" % target_versionfile) 1603 | write_to_version_file(target_versionfile, versions) 1604 | 1605 | _py2exe.run(self) 1606 | os.unlink(target_versionfile) 1607 | with open(cfg.versionfile_source, "w") as f: 1608 | LONG = LONG_VERSION_PY[cfg.VCS] 1609 | f.write(LONG % 1610 | {"DOLLAR": "$", 1611 | "STYLE": cfg.style, 1612 | "TAG_PREFIX": cfg.tag_prefix, 1613 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1614 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1615 | }) 1616 | cmds["py2exe"] = cmd_py2exe 1617 | 1618 | # we override different "sdist" commands for both environments 1619 | if "setuptools" in sys.modules: 1620 | from setuptools.command.sdist import sdist as _sdist 1621 | else: 1622 | from distutils.command.sdist import sdist as _sdist 1623 | 1624 | class cmd_sdist(_sdist): 1625 | def run(self): 1626 | versions = get_versions() 1627 | self._versioneer_generated_versions = versions 1628 | # unless we update this, the command will keep using the old 1629 | # version 1630 | self.distribution.metadata.version = versions["version"] 1631 | return _sdist.run(self) 1632 | 1633 | def make_release_tree(self, base_dir, files): 1634 | root = get_root() 1635 | cfg = get_config_from_root(root) 1636 | _sdist.make_release_tree(self, base_dir, files) 1637 | # now locate _version.py in the new base_dir directory 1638 | # (remembering that it may be a hardlink) and replace it with an 1639 | # updated value 1640 | target_versionfile = os.path.join(base_dir, cfg.versionfile_source) 1641 | print("UPDATING %s" % target_versionfile) 1642 | write_to_version_file(target_versionfile, 1643 | self._versioneer_generated_versions) 1644 | cmds["sdist"] = cmd_sdist 1645 | 1646 | return cmds 1647 | 1648 | 1649 | CONFIG_ERROR = """ 1650 | setup.cfg is missing the necessary Versioneer configuration. You need 1651 | a section like: 1652 | 1653 | [versioneer] 1654 | VCS = git 1655 | style = pep440 1656 | versionfile_source = src/myproject/_version.py 1657 | versionfile_build = myproject/_version.py 1658 | tag_prefix = 1659 | parentdir_prefix = myproject- 1660 | 1661 | You will also need to edit your setup.py to use the results: 1662 | 1663 | import versioneer 1664 | setup(version=versioneer.get_version(), 1665 | cmdclass=versioneer.get_cmdclass(), ...) 
1666 | 1667 | Please read the docstring in ./versioneer.py for configuration instructions, 1668 | edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. 1669 | """ 1670 | 1671 | SAMPLE_CONFIG = """ 1672 | # See the docstring in versioneer.py for instructions. Note that you must 1673 | # re-run 'versioneer.py setup' after changing this section, and commit the 1674 | # resulting files. 1675 | 1676 | [versioneer] 1677 | #VCS = git 1678 | #style = pep440 1679 | #versionfile_source = 1680 | #versionfile_build = 1681 | #tag_prefix = 1682 | #parentdir_prefix = 1683 | 1684 | """ 1685 | 1686 | INIT_PY_SNIPPET = """ 1687 | from ._version import get_versions 1688 | __version__ = get_versions()['version'] 1689 | del get_versions 1690 | """ 1691 | 1692 | 1693 | def do_setup(): 1694 | """Main VCS-independent setup function for installing Versioneer.""" 1695 | root = get_root() 1696 | try: 1697 | cfg = get_config_from_root(root) 1698 | except (EnvironmentError, configparser.NoSectionError, 1699 | configparser.NoOptionError) as e: 1700 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)): 1701 | print("Adding sample versioneer config to setup.cfg", 1702 | file=sys.stderr) 1703 | with open(os.path.join(root, "setup.cfg"), "a") as f: 1704 | f.write(SAMPLE_CONFIG) 1705 | print(CONFIG_ERROR, file=sys.stderr) 1706 | return 1 1707 | 1708 | print(" creating %s" % cfg.versionfile_source) 1709 | with open(cfg.versionfile_source, "w") as f: 1710 | LONG = LONG_VERSION_PY[cfg.VCS] 1711 | f.write(LONG % {"DOLLAR": "$", 1712 | "STYLE": cfg.style, 1713 | "TAG_PREFIX": cfg.tag_prefix, 1714 | "PARENTDIR_PREFIX": cfg.parentdir_prefix, 1715 | "VERSIONFILE_SOURCE": cfg.versionfile_source, 1716 | }) 1717 | 1718 | ipy = os.path.join(os.path.dirname(cfg.versionfile_source), 1719 | "__init__.py") 1720 | if os.path.exists(ipy): 1721 | try: 1722 | with open(ipy, "r") as f: 1723 | old = f.read() 1724 | except EnvironmentError: 1725 | old = "" 1726 | if INIT_PY_SNIPPET not in old: 1727 | print(" appending to %s" % ipy) 1728 | with open(ipy, "a") as f: 1729 | f.write(INIT_PY_SNIPPET) 1730 | else: 1731 | print(" %s unmodified" % ipy) 1732 | else: 1733 | print(" %s doesn't exist, ok" % ipy) 1734 | ipy = None 1735 | 1736 | # Make sure both the top-level "versioneer.py" and versionfile_source 1737 | # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so 1738 | # they'll be copied into source distributions. Pip won't be able to 1739 | # install the package without this. 1740 | manifest_in = os.path.join(root, "MANIFEST.in") 1741 | simple_includes = set() 1742 | try: 1743 | with open(manifest_in, "r") as f: 1744 | for line in f: 1745 | if line.startswith("include "): 1746 | for include in line.split()[1:]: 1747 | simple_includes.add(include) 1748 | except EnvironmentError: 1749 | pass 1750 | # That doesn't cover everything MANIFEST.in can do 1751 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so 1752 | # it might give some false negatives. Appending redundant 'include' 1753 | # lines is safe, though. 
1754 | if "versioneer.py" not in simple_includes: 1755 | print(" appending 'versioneer.py' to MANIFEST.in") 1756 | with open(manifest_in, "a") as f: 1757 | f.write("include versioneer.py\n") 1758 | else: 1759 | print(" 'versioneer.py' already in MANIFEST.in") 1760 | if cfg.versionfile_source not in simple_includes: 1761 | print(" appending versionfile_source ('%s') to MANIFEST.in" % 1762 | cfg.versionfile_source) 1763 | with open(manifest_in, "a") as f: 1764 | f.write("include %s\n" % cfg.versionfile_source) 1765 | else: 1766 | print(" versionfile_source already in MANIFEST.in") 1767 | 1768 | # Make VCS-specific changes. For git, this means creating/changing 1769 | # .gitattributes to mark _version.py for export-subst keyword 1770 | # substitution. 1771 | do_vcs_install(manifest_in, cfg.versionfile_source, ipy) 1772 | return 0 1773 | 1774 | 1775 | def scan_setup_py(): 1776 | """Validate the contents of setup.py against Versioneer's expectations.""" 1777 | found = set() 1778 | setters = False 1779 | errors = 0 1780 | with open("setup.py", "r") as f: 1781 | for line in f.readlines(): 1782 | if "import versioneer" in line: 1783 | found.add("import") 1784 | if "versioneer.get_cmdclass()" in line: 1785 | found.add("cmdclass") 1786 | if "versioneer.get_version()" in line: 1787 | found.add("get_version") 1788 | if "versioneer.VCS" in line: 1789 | setters = True 1790 | if "versioneer.versionfile_source" in line: 1791 | setters = True 1792 | if len(found) != 3: 1793 | print("") 1794 | print("Your setup.py appears to be missing some important items") 1795 | print("(but I might be wrong). Please make sure it has something") 1796 | print("roughly like the following:") 1797 | print("") 1798 | print(" import versioneer") 1799 | print(" setup( version=versioneer.get_version(),") 1800 | print(" cmdclass=versioneer.get_cmdclass(), ...)") 1801 | print("") 1802 | errors += 1 1803 | if setters: 1804 | print("You should remove lines like 'versioneer.VCS = ' and") 1805 | print("'versioneer.versionfile_source = ' . This configuration") 1806 | print("now lives in setup.cfg, and should be removed from setup.py") 1807 | print("") 1808 | errors += 1 1809 | return errors 1810 | 1811 | if __name__ == "__main__": 1812 | cmd = sys.argv[1] 1813 | if cmd == "setup": 1814 | errors = do_setup() 1815 | errors += scan_setup_py() 1816 | if errors: 1817 | sys.exit(1) 1818 | --------------------------------------------------------------------------------
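The CONFIG_ERROR text in versioneer.py above spells out how a project is expected to consume this module: setup.py imports the vendored versioneer.py and asks it only for the version string and the command-class overrides, while everything else is configured in the `[versioneer]` section of setup.cfg. A minimal sketch of that setup.py wiring follows; the project name and package discovery shown here are hypothetical placeholders, not taken from this repository:

```python
# setup.py -- minimal sketch of the integration described in CONFIG_ERROR;
# "my-project" and the package list are placeholder values.
from setuptools import setup, find_packages

import versioneer  # the vendored versioneer.py sitting next to setup.py

setup(
    name="my-project",
    version=versioneer.get_version(),    # e.g. "1.2.3" or "1.2.3+4.gabcdef1.dirty" in the default pep440 style
    cmdclass=versioneer.get_cmdclass(),  # wraps build_py/sdist so _version.py is rewritten at build time
    packages=find_packages(),
)
```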