├── .coveragerc ├── .editorconfig ├── .github └── workflows │ ├── benchmark.yml │ ├── benchmark_template.yml │ ├── codeql-analysis.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── Makefile ├── README.md ├── README_ZH.md ├── benchmarks ├── __init__.py ├── benchmark_test.py ├── large.json ├── medium.json ├── small.json ├── trace.py └── zipf.py ├── cacheme ├── __init__.py ├── core.py ├── data.py ├── interfaces.py ├── models.py ├── py.typed ├── serializer.py ├── storages │ ├── __init__.py │ ├── base.py │ ├── local.py │ ├── mongo.py │ ├── mysql.py │ ├── postgres.py │ ├── redis.py │ ├── scripts │ │ ├── mongo.js │ │ ├── mysql.sql │ │ ├── postgresql.sql │ │ └── sqlite.sql │ ├── sqldb.py │ └── sqlite.py └── utils.py ├── poetry.lock ├── pyproject.toml └── tests ├── __init__.py ├── __main__.py ├── test_core.py ├── test_serializers.py ├── test_storages.py └── utils.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = cacheme -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | end_of_line = lf 11 | charset = utf-8 12 | -------------------------------------------------------------------------------- /.github/workflows/benchmark.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Benchmarks 3 | 4 | on: 5 | push: 6 | branches: 7 | - master 8 | 9 | jobs: 10 | benchmark-1: 11 | uses: ./.github/workflows/benchmark_template.yml 12 | with: 13 | case: "test_read_only" 14 | secrets: inherit 15 | 16 | benchmark-2: 17 | needs: benchmark-1 18 | uses: ./.github/workflows/benchmark_template.yml 19 | with: 20 | case: "test_write_only" 21 | 
secrets: inherit 22 | 23 | benchmark-3: 24 | needs: benchmark-2 25 | uses: ./.github/workflows/benchmark_template.yml 26 | with: 27 | case: "test_zipf" 28 | secrets: inherit 29 | 30 | benchmark-4: 31 | needs: benchmark-3 32 | uses: ./.github/workflows/benchmark_template.yml 33 | with: 34 | case: "test_read_only_batch" 35 | secrets: inherit 36 | -------------------------------------------------------------------------------- /.github/workflows/benchmark_template.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Benchmarks 3 | 4 | on: 5 | workflow_call: 6 | inputs: 7 | case: 8 | required: true 9 | type: string 10 | 11 | jobs: 12 | benchmarks: 13 | name: "Benchmark ${{ inputs.case }}" 14 | runs-on: "ubuntu-latest" 15 | 16 | services: 17 | mysql: 18 | image: mysql:8.0 19 | env: 20 | MYSQL_USER: username 21 | MYSQL_PASSWORD: password 22 | MYSQL_ROOT_PASSWORD: password 23 | MYSQL_DATABASE: test 24 | ports: 25 | - 3306:3306 26 | options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 27 | 28 | postgres: 29 | image: postgres:14 30 | env: 31 | POSTGRES_USER: username 32 | POSTGRES_PASSWORD: password 33 | POSTGRES_DB: test 34 | ports: 35 | - 5432:5432 36 | options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 37 | 38 | redis: 39 | image: redis 40 | options: >- 41 | --health-cmd "redis-cli ping" 42 | --health-interval 10s 43 | --health-timeout 5s 44 | --health-retries 5 45 | ports: 46 | - 6379:6379 47 | 48 | mongodb: 49 | image: mongo 50 | env: 51 | MONGO_INITDB_ROOT_USERNAME: test 52 | MONGO_INITDB_ROOT_PASSWORD: password 53 | MONGO_INITDB_DATABASE: test 54 | options: >- 55 | --health-cmd "mongosh --eval 'db.version()'" 56 | --health-interval 10s 57 | --health-timeout 5s 58 | --health-retries 5 59 | ports: 60 | - 27017:27017 61 | 62 | steps: 63 | - uses: "actions/checkout@v3" 64 | - uses: "actions/setup-python@v4" 65 | with: 66 | 
python-version: "3.11" 67 | - name: Install Poetry 68 | uses: abatilo/actions-poetry@v2 69 | with: 70 | version: 1.3 71 | - name: Setup Poetry 72 | run: "poetry config virtualenvs.in-project true" 73 | - name: Cache Deps 74 | uses: actions/cache@v2 75 | id: cached-poetry-dependencies 76 | with: 77 | path: .venv 78 | key: python-3.11-pydeps-${{ hashFiles('**/poetry.lock') }} 79 | - name: "Install Dependencies" 80 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 81 | run: "poetry install --no-interaction --no-root" 82 | - name: "Run Lint" 83 | run: "poetry run mypy --ignore-missing-imports ." 84 | - name: "Run Benchmarks" 85 | run: "poetry run pytest benchmarks/benchmark_test.py::${{ inputs.case }} --benchmark-only --benchmark-json output.json" 86 | - name: "Publish Benchmark Result" 87 | uses: benchmark-action/github-action-benchmark@v1 88 | if: ${{ github.ref == 'refs/heads/master' }} 89 | with: 90 | name: 'Cacheme Benchmark: ${{ inputs.case }}' 91 | tool: 'pytest' 92 | output-file-path: output.json 93 | github-token: ${{ secrets.BENCH_TOKEN }} 94 | auto-push: true 95 | gh-repository: 'github.com/Yiling-J/cacheme-benchmark' 96 | benchmark-data-dir-path: 'dev/${{ inputs.case }}' 97 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 
6 | name: "CodeQL" 7 | 8 | on: 9 | push: 10 | branches: [master] 11 | pull_request: 12 | # The branches below must be a subset of the branches above 13 | branches: [master] 14 | schedule: 15 | - cron: '0 7 * * 3' 16 | 17 | jobs: 18 | analyze: 19 | name: Analyze 20 | runs-on: ubuntu-latest 21 | 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | # Override automatic language detection by changing the below list 26 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] 27 | language: ['python'] 28 | # Learn more... 29 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 30 | 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v2 34 | with: 35 | # We must fetch at least the immediate parents so that if this is 36 | # a pull request then we can checkout the head. 37 | fetch-depth: 2 38 | 39 | # If this run was triggered by a pull request event, then checkout 40 | # the head of the pull request instead of the merge commit. 41 | - run: git checkout HEAD^2 42 | if: ${{ github.event_name == 'pull_request' }} 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v2 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v2 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 
60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v2 72 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | jobs: 9 | publish: 10 | name: "Publish release" 11 | runs-on: "ubuntu-latest" 12 | 13 | environment: 14 | name: deploy 15 | 16 | steps: 17 | - uses: "actions/checkout@v3" 18 | - uses: "actions/setup-python@v4" 19 | with: 20 | python-version: '3.10' 21 | - name: Install Poetry 22 | uses: abatilo/actions-poetry@v2 23 | with: 24 | version: 1.3 25 | - name: Setup Poetry 26 | run: "poetry config virtualenvs.in-project true" 27 | - name: Cache Deps 28 | uses: actions/cache@v2 29 | id: cached-poetry-dependencies 30 | with: 31 | path: .venv 32 | key: python-3.10-pydeps-${{ hashFiles('**/poetry.lock') }} 33 | - name: "Install Dependencies" 34 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 35 | run: "poetry install --no-interaction --no-root" 36 | - name: "Build Package" 37 | run: "poetry build" 38 | - name: "Publish to PyPI" 39 | run: "poetry publish" 40 | env: 41 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} 42 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Tests 3 | 4 | on: 5 | push: 6 | branches: 7 | - master 8 | - "[0-9].[0-9]" 9 | pull_request: 10 | branches: ["master"] 11 | 12 | jobs: 13 | tests: 14 | name: "Python ${{ matrix.python-version }}" 15 
| runs-on: "ubuntu-latest" 16 | 17 | strategy: 18 | matrix: 19 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] 20 | 21 | services: 22 | mysql: 23 | image: mysql:8.0 24 | env: 25 | MYSQL_USER: username 26 | MYSQL_PASSWORD: password 27 | MYSQL_ROOT_PASSWORD: password 28 | MYSQL_DATABASE: test 29 | ports: 30 | - 3306:3306 31 | options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 32 | 33 | postgres: 34 | image: postgres:14 35 | env: 36 | POSTGRES_USER: username 37 | POSTGRES_PASSWORD: password 38 | POSTGRES_DB: test 39 | ports: 40 | - 5432:5432 41 | options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 42 | 43 | redis: 44 | image: redis 45 | options: >- 46 | --health-cmd "redis-cli ping" 47 | --health-interval 10s 48 | --health-timeout 5s 49 | --health-retries 5 50 | ports: 51 | - 6379:6379 52 | 53 | mongodb: 54 | image: mongo 55 | env: 56 | MONGO_INITDB_ROOT_USERNAME: test 57 | MONGO_INITDB_ROOT_PASSWORD: password 58 | MONGO_INITDB_DATABASE: test 59 | options: >- 60 | --health-cmd "mongosh --eval 'db.version()'" 61 | --health-interval 10s 62 | --health-timeout 5s 63 | --health-retries 5 64 | ports: 65 | - 27017:27017 66 | 67 | steps: 68 | - uses: "actions/checkout@v3" 69 | - uses: "actions/setup-python@v4" 70 | with: 71 | python-version: "${{ matrix.python-version }}" 72 | - name: Install Poetry 73 | uses: abatilo/actions-poetry@v2 74 | with: 75 | version: 1.3 76 | - name: Setup Poetry 77 | run: "poetry config virtualenvs.in-project true" 78 | - name: Cache Deps 79 | uses: actions/cache@v2 80 | id: cached-poetry-dependencies 81 | with: 82 | path: .venv 83 | key: python-${{ matrix.python-version }}-pydeps-${{ hashFiles('**/poetry.lock') }} 84 | - name: "Install Dependencies" 85 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 86 | run: "poetry install --no-interaction --no-root" 87 | - name: "Run Lint" 88 | run: "make lint" 89 | - name: "Run Tests" 90 | 
env: 91 | CI: "TRUE" 92 | run: "poetry run pytest --benchmark-skip" 93 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | ## [0.3.0] 5 | ### Changed 6 | - Refactor get and get_all method 7 | 8 | ## [0.2.3] 9 | ### Changed 10 | - Add theine in-memory cache(replace cacheme-utils) 11 | 12 | ## [0.2.2] 13 | ### Fixed 14 | - Fix local cache set existing key, policy should not update 15 | 16 | ### Changed 17 | - Bump Cacheme-utils version 18 | 19 | ## [0.2.1] 20 | ### Added 21 | - Remove expired nodes automatically 22 | - Add build_node API 23 | - Add missing py.typed 24 | 25 | ## [0.2.0] 26 | ### Added 27 | - Cacheme V2 28 | 29 | ## [0.1.1] 30 | ### Added 31 | - Node support Meta class 32 | - Add stale option to settings/cacheme decorator/node meta 33 | - Node support hit/miss function 34 | - Cacheme tags using nodes instead of cacheme instances 35 | 36 | ### Removed 37 | - Remove `invalid_all()` method from tag, using `tag.objects.invalid()` instead 38 | 39 | ## [0.1.0] 40 | ### Added 41 | - Add node capability to cacheme. 42 | 43 | ### Removed 44 | - Get keys from tag, tag only support invalid now. 45 | 46 | ## [0.0.9] 47 | ### Added 48 | - Cacheme first release. 49 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020, Yiling-J 4 | All rights reserved. 
5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- **Cache stats API:** Stats of each node are collected automatically.
+ [continuous benchmark](#continuous-benchmark)
This simple example uses a cache storage called "my-redis", which will be registered in the next step.
metrics.miss_count() # request_count - hit_count
So changing `version` will invalidate all keys automatically.
Cacheme provides several builtin serializers, and you can also write your own serializer.
If your use case is simple, also consider using [Theine](https://github.com/Yiling-J/theine) directly, which will have the best performance.
If you are familiar with Go [singleflight](https://pkg.go.dev/golang.org/x/sync/singleflight), you may have an idea how Cacheme works.
- Aiocache: Aiocache cached decorator
369 | 370 | #### 1k concurrency 371 | 372 | | | Time | Redis GET | Load Hits | 373 | |------------|-------|------------|-----------| 374 | | Cacheme | 25 s | 166454 | 55579 | 375 | | Cacheme-2 | 20 s | 90681 | 55632 | 376 | | Aiocache | 46 s | 200000 | 56367 | 377 | | Aiocache-2 | 63 s | 256492 | 55417 | 378 | | Cashews | 51 s | 200000 | 56920 | 379 | | Cashews-2 | 134 s | 200000 | 55450 | 380 | 381 | 382 | #### 10k concurrency 383 | 384 | | | Time | Redis GET | Load Hits | 385 | |------------|-------|-----------|-----------| 386 | | Cacheme | 25 s | 123704 | 56736 | 387 | | Cacheme-2 | 19 s | 83750 | 56635 | 388 | | Aiocache | 67 s | 200000 | 62568 | 389 | | Aiocache-2 | 113 s | 263195 | 55507 | 390 | | Cashews | 68 s | 200000 | 66036 | 391 | | Cashews-2 | 175 s | 200000 | 55709 | 392 | 393 | 394 | #### 100k concurrency 395 | 396 | | | Time | Redis GET | Load Hits | 397 | |------------|-------|-----------|-----------| 398 | | Cacheme | 24 s | 60990 | 56782 | 399 | | Cacheme-2 | 22 s | 55762 | 55588 | 400 | | Aiocache | 80 s | 200000 | 125085 | 401 | | Aiocache-2 | 178 s | 326417 | 65598 | 402 | | Cashews | 88 s | 200000 | 87894 | 403 | | Cashews-2 | 236 s | 200000 | 55647 | 404 | 405 | ### 20k concurrent batch requests 406 | 407 | source code: https://github.com/Yiling-J/cacheme/blob/master/benchmarks/trace.py 408 | 409 | How this benchmark run: 410 | 411 | 1. Initialize Cacheme with Redis backend, use Redis blocking pool and set pool size to 100. 412 | 2. Decorate Cacheme with a function which accept a number and sleep 0.1s. This function also record how many times it is called. 413 | 3. Register Redis response callback, so we can know how many times MGET command are called. 414 | 4. Create 20k `get_all` coroutines use a zipf generator and put them in async queue(around 50k-60k unique numbers). Each `get_all` request will get 20 unique numbers in batch. So totally 400k numbers. 415 | 5. Run coroutines in queue with N concurrent workers. 416 | 6. Collect results. 
Result:
- Time: How long it takes to finish the bench.
- Redis MGET: How many times the Redis MGET command is called; use this to evaluate pressure on the remote cache server.
- Load Hits: How many times the load function (which sleeps 0.1s) is called; use this to evaluate pressure on the load source (database or something else).
37 | + [MySQL Storage](#mysql-storage) 38 | - [Benchmarks](#benchmarks) 39 | 40 | ## 基本要求 41 | Python 3.7+ 42 | 43 | ## 安装 44 | ``` 45 | pip install cacheme 46 | ``` 47 | 48 | 不同存储源通过对应driver支持,可以根据情况选择安装 49 | ``` 50 | pip install cacheme[redis] 51 | pip install cacheme[aiomysql] 52 | pip install cacheme[motor] 53 | pip install cacheme[asyncpg] 54 | ``` 55 | 56 | ## 定义Node 57 | Node是Cacheme的核心部分。Node定义包含了缓存的key定义,缓存源数据读取以及存储相关的各种配置。通过例子比较直接: 58 | ```python 59 | import cacheme 60 | from dataclasses import dataclass 61 | from cacheme.serializer import MsgPackSerializer 62 | 63 | @dataclass 64 | class UserInfoNode(cacheme.Node): 65 | user_id: int 66 | 67 | def key(self) -> str: 68 | return f"user:{self.user_id}:info" 69 | 70 | async def load(self) -> Dict: 71 | user = get_user_from_db(self.user_id) 72 | return serialize(user) 73 | 74 | class Meta(cacheme.Node.Meta): 75 | version = "v1" 76 | caches = [cacheme.Cache(storage="my-redis", ttl=None)] 77 | serializer = MsgPackSerializer() 78 | ``` 79 | 以上这个例子定义了UserInfoNode,用于缓存UserInfo数据。缓存的key通过`key`函数生成。通过dataclass装饰器自动生成init方法。这样在调用Cacheme API时只使用node,避免手工输入key string。load函数定义了当缓存miss时如何从数据源获取数据。而Meta class则定义了cache的version(会自动加入key中),cache的存储方式,这里用了名叫my-redis的存储源以及存储/读取时用的serializer。 80 | 81 | 同时Cacheme也支持动态创建Node,可以和装饰器一起使用来快速缓存现有函数。当然推荐的方法还是单独定义Node class。 82 | ```python 83 | @Memoize(cacheme.build_node("TestNodeDynamic", "v1", [Cache(storage="local", ttl=None)])) 84 | async def fn(a: int) -> int: 85 | return 1 86 | 87 | 88 | @fn.to_node 89 | def _(a: int) -> cacheme.DynamicNode: 90 | return DynamicNode(key=f"bar:{a}") 91 | ``` 92 | 这里使用的时`DynamicNode`, 因为没有预先定义的Node class。只支持单一的`key`参数。 93 | 94 | ## 注册Storage 95 | Cacheme的Node和Storage是分开的,Node表示业务信息,比如用户信息node。而storage则是cache的存储方式。一个Node可以支持串联多种存储方式,同样一个存储方式也可以用在多种node上。 96 | ```python 97 | import cacheme 98 | 99 | await cacheme.register_storage("my-redis", cacheme.Storage(url="redis://localhost:6379")) 100 | ``` 101 | 102 | ## Cacheme API 103 | 104 | `get`: 
通过node获取数据 105 | ```python 106 | user = await cacheme.get(UserInfoNode(user_id=1)) 107 | ``` 108 | 109 | `get_all`: 通过node获取多条数据,传入nodes必须是同一类型 110 | ```python 111 | users = await cacheme.get_all([UserInfoNode(user_id=1), UserInfoNode(user_id=2)]) 112 | ``` 113 | 114 | `invalidate`: 删除某个node的缓存 115 | ```python 116 | await cacheme.invalidate(UserInfoNode(user_id=1)) 117 | ``` 118 | 119 | `refresh`: 重新从数据源读取某个node的缓存 120 | ```python 121 | await cacheme.refresh(UserInfoNode(user_id=1)) 122 | ``` 123 | 124 | `Memoize`: memoize装饰器,可用于memoize已有函数 125 | 126 | Decorate your function with `cacheme.Memoize` decorator and cache node. Cacheme will load data using the decorated function and ignore `load` method. 127 | Because your function may contain variable number of args/kwargs, we need one more step to map between args/kwargs to node. The decorated map function should have same input signature as memoized function, and return a cache node. 128 | 129 | ```python 130 | @cacheme.Memoize(UserInfoNode) 131 | async def get_user_info(user_id: int) -> Dict: 132 | return {} 133 | 134 | # function name is not important, so just use _ here 135 | @get_user_info.to_node 136 | def _(user_id: int) -> UserInfoNode: 137 | return UserInfoNode(user_id=user_id) 138 | ``` 139 | 140 | `nodes`: 列出所有nodes 141 | ```python 142 | nodes = cacheme.nodes() 143 | ``` 144 | 145 | `stats`: 获取节点统计数据 146 | ``` 147 | metrics = cacheme.stats(UserInfoNode) 148 | 149 | metrics.request_count() # total request count 150 | metrics.hit_count() # total hit count 151 | metrics.hit_rate() # hit_count/request_count 152 | metrics.miss_count() # (request_count - hit_count)/request_count 153 | metrics.miss_rate() # miss_count/request_count 154 | metric.load_success_count() # total load success count 155 | metrics.load_failure_count() # total load fail count 156 | metrics.load_failure_rate() # load_failure_count/load_count 157 | metrics.load_count() # total load count 158 | metrics.total_load_time() # total load time in 
nanoseconds 159 | metrics.average_load_time() # total_load_time/load_count 160 | ``` 161 | 162 | `set_prefix`: 设置全局key前缀 163 | ```python 164 | cacheme.set_prefix("mycache") 165 | ``` 166 | 167 | ## Cache Node 168 | 169 | #### Key 170 | 实际存储到storage层的key形式为`{prefix}:{key()}:{Meta.version}` 171 | 172 | #### Meta Class 173 | - `version[str]`: node版本信息. 174 | - `caches[List[Cache]]`: Node缓存的存储源. 多个存储源会按从左到右的顺序依次调用,在写入缓存时也会依次写入。定义`Cache`需要2个参数:`storage[str]` 和 `ttl[Optional[timedelta]]`. `storage`是调用 `register_storage`时传入的name, 而`ttl`就是这个node对应缓存的ttl. 175 | - `serializer[Optional[Serializer]]`: Serializer用于dump/load data. 如果是local cache,由于直接使用dict存储会忽略serializer. See [Serializers](#serializers). 176 | - `doorkeeper[Optional[DoorKeeper]]`: See [DoorKeeper](#doorkeeper). 177 | 178 | 以下例子展示了使用local + redis两级缓存的情况 179 | 180 | ```python 181 | import cacheme 182 | from dataclasses import dataclass 183 | from datetime import timedelta 184 | from cacheme.serializer import MsgPackSerializer 185 | 186 | @dataclass 187 | class UserInfoNode(cacheme.Node): 188 | user_id: int 189 | 190 | def key(self) -> str: 191 | return f"user:{self.user_id}:info" 192 | 193 | async def load(self) -> Dict: 194 | user = get_user_from_db(self.user_id) 195 | return serialize(user) 196 | 197 | class Meta(cacheme.Node.Meta): 198 | version = "v1" 199 | caches = [ 200 | cacheme.Cache(storage="local", ttl=timedelta(seconds=30)), 201 | cacheme.Cache(storage="my-redis", ttl=timedelta(days=10)) 202 | ] 203 | serializer = MsgPackSerializer() 204 | ``` 205 | 206 | #### Serializers 207 | Cacheme 提供以下内置serializer. 208 | 209 | - `PickleSerializer`: 使用Python pickle,支持各种类型. 210 | - `JSONSerializer`: 使用`pydantic_encoder` 和 `json`, 支持python基本类型/dataclass/pydantic model. See [pydantic types](https://docs.pydantic.dev/usage/types/). 211 | - `MsgPackSerializer`: 使用`pydantic_encoder` 和 `msgpack`, 支持python基本类型/dataclass/pydantic model. See [pydantic types](https://docs.pydantic.dev/usage/types/). 
212 | 213 | 以上3种serializer同时有对应的压缩版本, 在存入存储源前会使用zlib level-3进行压缩 214 | 215 | - `CompressedPickleSerializer` 216 | - `CompressedJSONSerializer` 217 | - `CompressedMsgPackSerializer` 218 | 219 | #### DoorKeeper 220 | 概念来源于[TinyLfu 论文](https://arxiv.org/pdf/1512.00727.pdf). 221 | 222 | *The Doorkeeper is a regular Bloom filter placed in front of the cahce. Upon 223 | item arrival, we first check if the item is contained in the Doorkeeper. If it is not contained in the 224 | Doorkeeper (as is expected with first timers and tail items), the item is inserted to the Doorkeeper and 225 | otherwise, it is inserted to the cache.* 226 | 227 | 缓存请求第一次到达服务端时先不缓存数据,只是更新Bloom filter, 等请求第二次到达时才把数据存入缓存。这么做好处是很多只请求1次的数据会被筛掉不进入缓存,节约空间。坏处是所有请求都会至少从数据源load两次。BloomFilter在请求到达size后会自动重制。 228 | 229 | ```python 230 | from cacheme import BloomFilter 231 | 232 | @dataclass 233 | class UserInfoNode(cacheme.Node): 234 | 235 | class Meta(cacheme.Node.Meta): 236 | # size 100000, false positive probability 0.01 237 | doorkeeper = BloomFilter(100000, 0.01) 238 | ``` 239 | 240 | ## Cache Storage 241 | 242 | #### Local Storage 243 | Local Storage使用Python dict存储数据,支持lru和tlfu两种policy。当缓存到达设定size时会自动通过policy进行驱逐。 244 | ```python 245 | # lru policy 246 | Storage(url="local://lru", size=10000) 247 | 248 | # tinylfu policy 249 | Storage(url="local://tlfu", size=10000) 250 | 251 | ``` 252 | Parameters: 253 | 254 | - `url`: `local://{policy}`. 2 policies are currently supported: 255 | - `lru` 256 | - `tlfu`: TinyLfu policy, see https://arxiv.org/pdf/1512.00727.pdf 257 | 258 | - `size`: size of the storage. Policy will be used to evict key when cache is full. 259 | 260 | #### Redis Storage 261 | ```python 262 | Storage(url="redis://localhost:6379") 263 | 264 | # cluster 265 | Storage(url="redis://localhost:6379", cluster=True) 266 | ``` 267 | Parameters: 268 | 269 | - `url`: redis connection url. 270 | - `cluster`: bool, cluster or not, default False. 271 | - `pool_size`: connection pool size, default 100. 
272 | 273 | #### MongoDB Storage 274 | 使用该storage前需要先创建index. See [mongo.js](cacheme/storages/scripts/mongo.js) 275 | ```python 276 | Storage(url="mongodb://test:password@localhost:27017",database="test",collection="cache") 277 | ``` 278 | Parameters: 279 | 280 | - `url`: mongodb connection url. 281 | - `database`: mongodb database name. 282 | - `collection`: mongodb collection name. 283 | - `pool_size`: connection pool size, default 50. 284 | 285 | #### Sqlite Storage 286 | 使用该storage前需要先创建table及index. See [sqlite.sql](cacheme/storages/scripts/sqlite.sql) 287 | ```python 288 | Storage(url="sqlite:///test", table="cache") 289 | ``` 290 | Parameters: 291 | 292 | - `url`: sqlite connection url. 293 | - `table`: cache table name. 294 | - `pool_size`: connection pool size, default 50. 295 | 296 | #### PostgreSQL Storage 297 | 使用该storage前需要先创建table及index. See [postgresql.sql](cacheme/storages/scripts/postgresql.sql) 298 | ```python 299 | Storage(url="postgresql://username:password@127.0.0.1:5432/test", table="cache") 300 | ``` 301 | Parameters: 302 | 303 | - `url`: postgres connection url. 304 | - `table`: cache table name. 305 | - `pool_size`: connection pool size, default 50. 306 | 307 | #### MySQL Storage 308 | 使用该storage前需要先创建table及index. See [mysql.sql](cacheme/storages/scripts/mysql.sql) 309 | ```python 310 | Storage("mysql://username:password@localhost:3306/test", table="cache") 311 | ``` 312 | Parameters: 313 | 314 | - `url`: mysql connection url. 315 | - `table`: cache table name. 316 | - `pool_size`: connection pool size, default 50. 
317 | 318 | ## Benchmarks 319 | - Local Storage Hit Ratios(hit_count/request_count) 320 | ![hit ratios](benchmarks/hit_ratio.png) 321 | [source code](benchmarks/tlfu_hit.py) 322 | 323 | - Throughput Benchmark of different storages 324 | 325 | See [benchmark]( https://github.com/Yiling-J/cacheme-benchmark) 326 | -------------------------------------------------------------------------------- /benchmarks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yiling-J/cacheme/f402c45267ad107a647395a74e67e53760c13755/benchmarks/__init__.py -------------------------------------------------------------------------------- /benchmarks/benchmark_test.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import uuid 4 | from dataclasses import dataclass 5 | from random import sample 6 | from time import time 7 | from typing import Callable, ClassVar, Dict, List 8 | 9 | import pytest 10 | 11 | from benchmarks.zipf import Zipf 12 | from cacheme import Cache, Node, Storage, get, get_all, register_storage 13 | from cacheme.serializer import MsgPackSerializer 14 | from tests.utils import setup_storage 15 | 16 | REQUESTS = 1000 17 | 18 | 19 | async def storage_init(storage): 20 | if not isinstance(storage, Storage): 21 | return 22 | await register_storage("test", storage) 23 | await setup_storage(storage._storage) 24 | 25 | 26 | async def simple_get(Node: Callable, i: int): 27 | result = await get(Node(uid=i)) 28 | assert result["uid"] == i 29 | 30 | 31 | async def simple_get_all(Node: Callable, l: List[int]): 32 | result = await get_all([Node(uid=i) for i in l]) 33 | assert [r["uid"] for r in result] == l 34 | 35 | 36 | async def bench_run(queue): 37 | for f in queue: 38 | await f 39 | 40 | 41 | @pytest.fixture( 42 | params=[ 43 | "theine-tlfu", 44 | "redis", 45 | "mongo", 46 | "postgres", 47 | "mysql", 48 | ] 49 | ) 50 | def 
storage_provider(request): 51 | @dataclass 52 | class FooNode(Node): 53 | uid: int 54 | payload_fn: ClassVar[Callable] 55 | uuid: ClassVar[int] 56 | 57 | def key(self) -> str: 58 | return f"uid:{self.uid}:{self.uuid}" 59 | 60 | async def load(self) -> Dict: 61 | return self.payload_fn(self.uid) 62 | 63 | class Meta(Node.Meta): 64 | version = "v1" 65 | caches = [Cache(storage="test", ttl=None)] 66 | serializer = MsgPackSerializer() 67 | 68 | storages = { 69 | "theine-tlfu": lambda table, size: Storage(url="local://tlfu", size=size), 70 | "mysql": lambda table, _: Storage( 71 | "mysql://username:password@localhost:3306/test", table=table 72 | ), 73 | "postgres": lambda table, _: Storage( 74 | "postgresql://username:password@127.0.0.1:5432/test", table=table 75 | ), 76 | "redis": lambda table, _: Storage("redis://localhost:6379"), 77 | "mongo": lambda table, _: Storage( 78 | "mongodb://test:password@localhost:27017", 79 | database="test", 80 | collection=table, 81 | ), 82 | } 83 | yield { 84 | "storage": storages[request.param], 85 | "name": request.param, 86 | "node_cls": FooNode, 87 | } 88 | 89 | 90 | @pytest.fixture(params=["small", "medium", "large"]) 91 | def payload(request): 92 | with open(f"benchmarks/{request.param}.json") as f: 93 | content = f.read() 94 | content_json = json.loads(content) 95 | return { 96 | "fn": lambda _, uid: {"uid": uid, "data": content_json}, 97 | "name": request.param, 98 | } 99 | 100 | 101 | # each request contains 1 operation: a hit get 102 | def test_read_only(benchmark, storage_provider, payload): 103 | loop = asyncio.events.new_event_loop() 104 | asyncio.events.set_event_loop(loop) 105 | _uuid = uuid.uuid4().int 106 | table = f"test_{_uuid}" 107 | Node = storage_provider["node_cls"] 108 | Node.payload_fn = payload["fn"] 109 | Node.uuid = _uuid 110 | storage = storage_provider["storage"](table, REQUESTS) 111 | loop.run_until_complete(storage_init(storage)) 112 | queue = [] 113 | for i in range(REQUESTS): 114 | 
queue.append(simple_get(Node, i)) 115 | loop.run_until_complete(bench_run(queue)) 116 | 117 | def setup(): 118 | queue = [] 119 | for i in range(REQUESTS): 120 | queue.append(simple_get(Node, i)) 121 | return (queue,), {} 122 | 123 | benchmark.pedantic( 124 | lambda queue: loop.run_until_complete(bench_run(queue)), 125 | setup=setup, 126 | rounds=3, 127 | ) 128 | loop.run_until_complete(storage.close()) 129 | asyncio.events.set_event_loop(None) 130 | loop.close() 131 | 132 | 133 | # each request contains 3 operations: a miss get -> load from source -> set result to cache 134 | def test_write_only(benchmark, storage_provider, payload): 135 | loop = asyncio.events.new_event_loop() 136 | asyncio.events.set_event_loop(loop) 137 | _uuid = uuid.uuid4().int 138 | table = f"test_{_uuid}" 139 | Node = storage_provider["node_cls"] 140 | Node.payload_fn = payload["fn"] 141 | Node.uuid = _uuid 142 | storage = storage_provider["storage"](table, REQUESTS) 143 | loop.run_until_complete(storage_init(storage)) 144 | 145 | def setup(): 146 | queue = [] 147 | rand = int(time()) 148 | for i in range(REQUESTS): 149 | queue.append(simple_get(Node, rand + i)) 150 | return (queue,), {} 151 | 152 | benchmark.pedantic( 153 | lambda queue: loop.run_until_complete(bench_run(queue)), 154 | setup=setup, 155 | rounds=3, 156 | ) 157 | loop.run_until_complete(storage.close()) 158 | asyncio.events.set_event_loop(None) 159 | loop.close() 160 | 161 | 162 | # each request use a random zipf number: read >> write, size limit to REQUESTS//10 163 | def test_zipf(benchmark, storage_provider, payload): 164 | loop = asyncio.events.new_event_loop() 165 | asyncio.events.set_event_loop(loop) 166 | _uuid = uuid.uuid4().int 167 | table = f"test_{_uuid}" 168 | Node = storage_provider["node_cls"] 169 | Node.payload_fn = payload["fn"] 170 | Node.uuid = _uuid 171 | storage = storage_provider["storage"](table, REQUESTS // 10) 172 | loop.run_until_complete(storage_init(storage)) 173 | 174 | def setup(): 175 | queue = 
[] 176 | z = Zipf(1.0001, 10, REQUESTS) 177 | for _ in range(REQUESTS): 178 | queue.append(simple_get(Node, z.get())) 179 | return (queue,), {} 180 | 181 | benchmark.pedantic( 182 | lambda queue: loop.run_until_complete(bench_run(queue)), 183 | setup=setup, 184 | rounds=3, 185 | ) 186 | loop.run_until_complete(storage.close()) 187 | asyncio.events.set_event_loop(None) 188 | loop.close() 189 | 190 | 191 | # each request use 20 unique random numbers already in cache 192 | # REQUESTS // 10 requests to make benchmark run fast 193 | def test_read_only_batch(benchmark, storage_provider, payload): 194 | loop = asyncio.events.new_event_loop() 195 | asyncio.events.set_event_loop(loop) 196 | _uuid = uuid.uuid4().int 197 | table = f"test_{_uuid}" 198 | Node = storage_provider["node_cls"] 199 | Node.payload_fn = payload["fn"] 200 | Node.uuid = _uuid 201 | Node.sleep = True 202 | storage = storage_provider["storage"](table, REQUESTS // 10) 203 | loop.run_until_complete(storage_init(storage)) 204 | queue = [] 205 | for i in range(REQUESTS // 10): 206 | queue.append(simple_get(Node, i)) 207 | loop.run_until_complete(bench_run(queue)) 208 | 209 | def setup(): 210 | 211 | queue = [] 212 | for _ in range(REQUESTS // 10): 213 | queue.append(simple_get_all(Node, sample(range(REQUESTS // 10), 20))) 214 | return (queue,), {} 215 | 216 | benchmark.pedantic( 217 | lambda queue: loop.run_until_complete(bench_run(queue)), 218 | setup=setup, 219 | rounds=3, 220 | ) 221 | loop.run_until_complete(storage.close()) 222 | asyncio.events.set_event_loop(None) 223 | loop.close() 224 | -------------------------------------------------------------------------------- /benchmarks/large.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id": "63c9578aaf78c452f1be725e", 4 | "index": 0, 5 | "guid": "17de7a9c-b46d-47a6-8163-7792e5867458", 6 | "isActive": false, 7 | "balance": "$2,587.14", 8 | "picture": "http://placehold.it/32x32", 9 | "age": 28, 10 | 
"eyeColor": "blue", 11 | "name": "Waters Wallace", 12 | "gender": "male", 13 | "company": "CIRCUM", 14 | "email": "waterswallace@circum.com", 15 | "phone": "+1 (969) 489-2505", 16 | "address": "945 Columbus Place, Skyland, California, 9332", 17 | "about": "Sunt adipisicing excepteur id fugiat ullamco velit est officia tempor nulla ea consequat. Ullamco ullamco labore ad ullamco proident cillum occaecat dolor tempor sint sunt adipisicing. Sint reprehenderit reprehenderit duis mollit minim fugiat veniam. Sunt eiusmod sunt commodo sint esse ex aute laboris qui Lorem esse ea.\r\n", 18 | "registered": "2019-09-23T07:34:54 -08:00", 19 | "latitude": -54.00815, 20 | "longitude": 0.947831, 21 | "tags": ["qui", "esse", "magna", "quis", "sit", "anim", "exercitation"], 22 | "friends": [ 23 | { 24 | "id": 0, 25 | "name": "Janna Yang" 26 | }, 27 | { 28 | "id": 1, 29 | "name": "Hendricks Glass" 30 | }, 31 | { 32 | "id": 2, 33 | "name": "Kim Benson" 34 | } 35 | ], 36 | "greeting": "Hello, Waters Wallace! You have 4 unread messages.", 37 | "favoriteFruit": "apple" 38 | }, 39 | { 40 | "_id": "63c9578a8d2d63143fca354a", 41 | "index": 1, 42 | "guid": "3b12928f-7dfa-4e31-8d98-79478ea29561", 43 | "isActive": false, 44 | "balance": "$2,429.47", 45 | "picture": "http://placehold.it/32x32", 46 | "age": 40, 47 | "eyeColor": "green", 48 | "name": "Janette Ayala", 49 | "gender": "female", 50 | "company": "PRISMATIC", 51 | "email": "janetteayala@prismatic.com", 52 | "phone": "+1 (826) 514-3793", 53 | "address": "104 Denton Place, Hoagland, Massachusetts, 4656", 54 | "about": "Duis consectetur non tempor commodo in proident consectetur exercitation duis tempor non nisi sit reprehenderit. Minim sunt velit id laborum exercitation eu veniam velit veniam dolor esse sint. 
Exercitation elit quis id proident velit duis do elit.\r\n", 55 | "registered": "2020-12-01T10:08:15 -08:00", 56 | "latitude": 35.440803, 57 | "longitude": -100.341256, 58 | "tags": [ 59 | "in", 60 | "est", 61 | "exercitation", 62 | "irure", 63 | "ea", 64 | "excepteur", 65 | "ipsum" 66 | ], 67 | "friends": [ 68 | { 69 | "id": 0, 70 | "name": "Baxter Macias" 71 | }, 72 | { 73 | "id": 1, 74 | "name": "Manning Montoya" 75 | }, 76 | { 77 | "id": 2, 78 | "name": "Erickson Hahn" 79 | } 80 | ], 81 | "greeting": "Hello, Janette Ayala! You have 10 unread messages.", 82 | "favoriteFruit": "strawberry" 83 | }, 84 | { 85 | "_id": "63c9578a377239f764ef5d58", 86 | "index": 2, 87 | "guid": "edd0edcb-f717-40ba-bdc3-17f9ba0afa1b", 88 | "isActive": false, 89 | "balance": "$1,057.50", 90 | "picture": "http://placehold.it/32x32", 91 | "age": 31, 92 | "eyeColor": "green", 93 | "name": "Mariana Clarke", 94 | "gender": "female", 95 | "company": "GOLISTIC", 96 | "email": "marianaclarke@golistic.com", 97 | "phone": "+1 (889) 539-2943", 98 | "address": "900 Hutchinson Court, Veguita, New Jersey, 3415", 99 | "about": "Sit nostrud tempor sint occaecat nostrud quis ut nostrud do nisi ad. Laboris dolore ipsum do occaecat. Quis magna nulla non in adipisicing reprehenderit anim enim ullamco adipisicing. Exercitation dolor officia veniam mollit nisi. Do magna id irure mollit ex qui do nisi nulla nisi.\r\n", 100 | "registered": "2018-03-14T12:16:34 -08:00", 101 | "latitude": 23.242467, 102 | "longitude": -156.106635, 103 | "tags": [ 104 | "non", 105 | "laboris", 106 | "commodo", 107 | "consectetur", 108 | "minim", 109 | "dolor", 110 | "velit" 111 | ], 112 | "friends": [ 113 | { 114 | "id": 0, 115 | "name": "Sharpe Weaver" 116 | }, 117 | { 118 | "id": 1, 119 | "name": "Jana Pitts" 120 | }, 121 | { 122 | "id": 2, 123 | "name": "Steele Mills" 124 | } 125 | ], 126 | "greeting": "Hello, Mariana Clarke! 
You have 1 unread messages.", 127 | "favoriteFruit": "banana" 128 | }, 129 | { 130 | "_id": "63c9578ac19cd9b59f8494b9", 131 | "index": 3, 132 | "guid": "b4e4c64a-8357-42aa-aad3-d17cd9ec9d12", 133 | "isActive": true, 134 | "balance": "$3,143.02", 135 | "picture": "http://placehold.it/32x32", 136 | "age": 33, 137 | "eyeColor": "blue", 138 | "name": "Gwendolyn Gallagher", 139 | "gender": "female", 140 | "company": "DOGNOST", 141 | "email": "gwendolyngallagher@dognost.com", 142 | "phone": "+1 (828) 445-3854", 143 | "address": "742 Ashford Street, Hayes, Maryland, 9584", 144 | "about": "Eu cillum dolore nisi fugiat voluptate occaecat excepteur ipsum eiusmod voluptate mollit. Eiusmod ea aliqua deserunt esse sint. Commodo quis eiusmod ipsum voluptate ea in proident laboris aute incididunt duis voluptate labore laborum. Ex cupidatat veniam elit aliqua exercitation duis voluptate et quis exercitation culpa cupidatat.\r\n", 145 | "registered": "2015-01-23T11:21:47 -08:00", 146 | "latitude": -69.067974, 147 | "longitude": -39.522313, 148 | "tags": ["sint", "labore", "sit", "do", "deserunt", "proident", "esse"], 149 | "friends": [ 150 | { 151 | "id": 0, 152 | "name": "Violet Randolph" 153 | }, 154 | { 155 | "id": 1, 156 | "name": "Callie Guthrie" 157 | }, 158 | { 159 | "id": 2, 160 | "name": "Juliet West" 161 | } 162 | ], 163 | "greeting": "Hello, Gwendolyn Gallagher! 
You have 1 unread messages.", 164 | "favoriteFruit": "strawberry" 165 | }, 166 | { 167 | "_id": "63c9578ac4d941890bfbc909", 168 | "index": 4, 169 | "guid": "6bc31e42-5d31-4064-bb6c-199064f8477f", 170 | "isActive": false, 171 | "balance": "$1,753.57", 172 | "picture": "http://placehold.it/32x32", 173 | "age": 26, 174 | "eyeColor": "brown", 175 | "name": "Kinney Espinoza", 176 | "gender": "male", 177 | "company": "ENDIPIN", 178 | "email": "kinneyespinoza@endipin.com", 179 | "phone": "+1 (892) 528-3591", 180 | "address": "642 Pitkin Avenue, Loretto, New York, 215", 181 | "about": "Id minim aliquip est ad mollit reprehenderit. Non ut nulla proident cillum cupidatat. Cupidatat proident adipisicing culpa minim labore deserunt commodo fugiat dolor laboris consequat laboris excepteur. Aliqua et ea deserunt est deserunt tempor incididunt esse consectetur sint aute tempor officia excepteur.\r\n", 182 | "registered": "2020-07-16T09:46:26 -08:00", 183 | "latitude": 88.37495, 184 | "longitude": 107.729811, 185 | "tags": [ 186 | "duis", 187 | "proident", 188 | "dolor", 189 | "laboris", 190 | "anim", 191 | "mollit", 192 | "dolore" 193 | ], 194 | "friends": [ 195 | { 196 | "id": 0, 197 | "name": "Crosby Romero" 198 | }, 199 | { 200 | "id": 1, 201 | "name": "Mack Robbins" 202 | }, 203 | { 204 | "id": 2, 205 | "name": "Pate Myers" 206 | } 207 | ], 208 | "greeting": "Hello, Kinney Espinoza! 
You have 9 unread messages.", 209 | "favoriteFruit": "banana" 210 | }, 211 | { 212 | "_id": "63c9578a632ff1418c075dee", 213 | "index": 5, 214 | "guid": "a549df0d-9103-47c5-afca-cfa6d175c1fc", 215 | "isActive": false, 216 | "balance": "$2,912.14", 217 | "picture": "http://placehold.it/32x32", 218 | "age": 40, 219 | "eyeColor": "brown", 220 | "name": "Pearlie Harrington", 221 | "gender": "female", 222 | "company": "MITROC", 223 | "email": "pearlieharrington@mitroc.com", 224 | "phone": "+1 (830) 445-3806", 225 | "address": "690 India Street, Oneida, Northern Mariana Islands, 7753", 226 | "about": "Voluptate officia velit occaecat adipisicing minim id dolor laboris minim pariatur. Aliqua cillum consectetur elit magna consectetur proident. Elit velit in elit est cupidatat aliqua qui tempor aute Lorem sunt aute.\r\n", 227 | "registered": "2015-05-28T03:11:36 -08:00", 228 | "latitude": 28.387139, 229 | "longitude": 45.673307, 230 | "tags": [ 231 | "in", 232 | "veniam", 233 | "amet", 234 | "excepteur", 235 | "anim", 236 | "consequat", 237 | "cupidatat" 238 | ], 239 | "friends": [ 240 | { 241 | "id": 0, 242 | "name": "Santos Kirkland" 243 | }, 244 | { 245 | "id": 1, 246 | "name": "Chambers Dejesus" 247 | }, 248 | { 249 | "id": 2, 250 | "name": "Diane Holloway" 251 | } 252 | ], 253 | "greeting": "Hello, Pearlie Harrington! You have 1 unread messages.", 254 | "favoriteFruit": "apple" 255 | }, 256 | { 257 | "_id": "63c9578a7e3d873ea85a58b5", 258 | "index": 6, 259 | "guid": "24bb7ea3-caf2-49ef-80f7-619a054e9d6a", 260 | "isActive": false, 261 | "balance": "$1,888.36", 262 | "picture": "http://placehold.it/32x32", 263 | "age": 20, 264 | "eyeColor": "blue", 265 | "name": "Sherman Fernandez", 266 | "gender": "male", 267 | "company": "EVENTIX", 268 | "email": "shermanfernandez@eventix.com", 269 | "phone": "+1 (966) 449-2388", 270 | "address": "739 Oakland Place, Gerton, New Mexico, 7918", 271 | "about": "Excepteur ipsum reprehenderit duis id nostrud ipsum cillum est. 
Officia occaecat eu nostrud laborum et incididunt mollit nisi laboris incididunt cupidatat labore irure. Qui duis incididunt cupidatat Lorem aliqua eu nostrud adipisicing.\r\n", 272 | "registered": "2021-06-14T02:04:58 -08:00", 273 | "latitude": 87.066869, 274 | "longitude": 85.177542, 275 | "tags": [ 276 | "nostrud", 277 | "laboris", 278 | "ea", 279 | "laboris", 280 | "enim", 281 | "ullamco", 282 | "consequat" 283 | ], 284 | "friends": [ 285 | { 286 | "id": 0, 287 | "name": "Nellie Swanson" 288 | }, 289 | { 290 | "id": 1, 291 | "name": "Shelly Orr" 292 | }, 293 | { 294 | "id": 2, 295 | "name": "Bishop Durham" 296 | } 297 | ], 298 | "greeting": "Hello, Sherman Fernandez! You have 8 unread messages.", 299 | "favoriteFruit": "strawberry" 300 | }, 301 | { 302 | "_id": "63c9578a9c8d64ac530ddbd9", 303 | "index": 7, 304 | "guid": "bb6e84bc-adfa-4ffc-a24c-549fb4674137", 305 | "isActive": true, 306 | "balance": "$2,571.37", 307 | "picture": "http://placehold.it/32x32", 308 | "age": 29, 309 | "eyeColor": "blue", 310 | "name": "Beulah Young", 311 | "gender": "female", 312 | "company": "OZEAN", 313 | "email": "beulahyoung@ozean.com", 314 | "phone": "+1 (824) 593-3377", 315 | "address": "791 Berriman Street, Greenbush, Louisiana, 1690", 316 | "about": "Adipisicing ipsum velit aute quis laboris adipisicing fugiat et reprehenderit. Labore in nulla aute in ipsum reprehenderit Lorem officia Lorem amet. Labore labore et anim enim officia et ipsum dolor commodo. Lorem esse minim ut ullamco sint. Laborum ex ipsum sint ad veniam. Ut ea culpa est cupidatat aliquip proident commodo ut est aliqua fugiat sunt. 
Nostrud commodo id nostrud deserunt ullamco adipisicing sint Lorem consequat proident ad.\r\n", 317 | "registered": "2016-04-08T02:45:04 -08:00", 318 | "latitude": 18.677141, 319 | "longitude": -3.562633, 320 | "tags": [ 321 | "dolore", 322 | "exercitation", 323 | "ad", 324 | "ipsum", 325 | "et", 326 | "irure", 327 | "consectetur" 328 | ], 329 | "friends": [ 330 | { 331 | "id": 0, 332 | "name": "Kemp Sanchez" 333 | }, 334 | { 335 | "id": 1, 336 | "name": "Madge Chaney" 337 | }, 338 | { 339 | "id": 2, 340 | "name": "Morin Rutledge" 341 | } 342 | ], 343 | "greeting": "Hello, Beulah Young! You have 1 unread messages.", 344 | "favoriteFruit": "apple" 345 | }, 346 | { 347 | "_id": "63c9578a6ae5e2823621fc52", 348 | "index": 8, 349 | "guid": "e59e74b8-fcf4-49ad-a8df-50f21243a75d", 350 | "isActive": true, 351 | "balance": "$2,978.94", 352 | "picture": "http://placehold.it/32x32", 353 | "age": 40, 354 | "eyeColor": "green", 355 | "name": "Harrington Moore", 356 | "gender": "male", 357 | "company": "COGNICODE", 358 | "email": "harringtonmoore@cognicode.com", 359 | "phone": "+1 (980) 525-3889", 360 | "address": "793 Just Court, Ventress, Pennsylvania, 2821", 361 | "about": "Eu minim et cupidatat excepteur quis elit incididunt velit ea labore. Incididunt culpa ullamco dolore pariatur culpa. Occaecat ea eu sit eu occaecat ex consequat do. Culpa in excepteur ut elit dolor ipsum eu laboris aute.\r\n", 362 | "registered": "2022-12-14T12:16:29 -08:00", 363 | "latitude": -73.172102, 364 | "longitude": -179.1638, 365 | "tags": [ 366 | "ullamco", 367 | "duis", 368 | "consectetur", 369 | "ipsum", 370 | "nulla", 371 | "ea", 372 | "ipsum" 373 | ], 374 | "friends": [ 375 | { 376 | "id": 0, 377 | "name": "Freeman Pruitt" 378 | }, 379 | { 380 | "id": 1, 381 | "name": "Maxwell Middleton" 382 | }, 383 | { 384 | "id": 2, 385 | "name": "Kristin Hall" 386 | } 387 | ], 388 | "greeting": "Hello, Harrington Moore! 
You have 1 unread messages.", 389 | "favoriteFruit": "apple" 390 | }, 391 | { 392 | "_id": "63c9578ac06f7671c48f5920", 393 | "index": 9, 394 | "guid": "6a380434-f543-4138-a203-1f5066df728f", 395 | "isActive": true, 396 | "balance": "$1,554.38", 397 | "picture": "http://placehold.it/32x32", 398 | "age": 21, 399 | "eyeColor": "green", 400 | "name": "Summers Ford", 401 | "gender": "male", 402 | "company": "OPTYK", 403 | "email": "summersford@optyk.com", 404 | "phone": "+1 (892) 406-2402", 405 | "address": "800 Fayette Street, Roderfield, Minnesota, 6533", 406 | "about": "Aliqua excepteur ullamco culpa sunt in eu reprehenderit. Cupidatat eiusmod nisi non magna voluptate nulla pariatur voluptate culpa. Lorem amet deserunt officia mollit occaecat id excepteur. Culpa nulla consequat tempor aute est mollit deserunt sit aliquip veniam fugiat ipsum culpa adipisicing.\r\n", 407 | "registered": "2014-08-19T10:53:30 -08:00", 408 | "latitude": 18.522918, 409 | "longitude": 0.334827, 410 | "tags": [ 411 | "reprehenderit", 412 | "ad", 413 | "sint", 414 | "aliquip", 415 | "tempor", 416 | "voluptate", 417 | "ex" 418 | ], 419 | "friends": [ 420 | { 421 | "id": 0, 422 | "name": "Laurel Farmer" 423 | }, 424 | { 425 | "id": 1, 426 | "name": "Marian Haney" 427 | }, 428 | { 429 | "id": 2, 430 | "name": "Althea Cooper" 431 | } 432 | ], 433 | "greeting": "Hello, Summers Ford! 
You have 1 unread messages.", 434 | "favoriteFruit": "apple" 435 | }, 436 | { 437 | "_id": "63c9578aac38101632cf173c", 438 | "index": 10, 439 | "guid": "652b3314-25b1-4263-8d24-0bd1f9a41cf8", 440 | "isActive": true, 441 | "balance": "$3,946.73", 442 | "picture": "http://placehold.it/32x32", 443 | "age": 31, 444 | "eyeColor": "green", 445 | "name": "Dale Wong", 446 | "gender": "female", 447 | "company": "LUDAK", 448 | "email": "dalewong@ludak.com", 449 | "phone": "+1 (838) 531-2974", 450 | "address": "468 Portland Avenue, Thermal, Hawaii, 9597", 451 | "about": "Dolore eu elit sint aliquip commodo enim sunt esse magna qui et mollit. Consequat incididunt nostrud officia ea excepteur mollit. Eiusmod cillum quis laboris voluptate nostrud irure voluptate exercitation qui veniam ipsum. Ad nisi cupidatat Lorem laborum esse pariatur sit nisi fugiat excepteur sint commodo occaecat. Enim ad magna dolore et ipsum qui ipsum.\r\n", 452 | "registered": "2020-11-19T05:26:11 -08:00", 453 | "latitude": -17.26675, 454 | "longitude": 128.884284, 455 | "tags": [ 456 | "enim", 457 | "aliquip", 458 | "officia", 459 | "commodo", 460 | "nisi", 461 | "ullamco", 462 | "esse" 463 | ], 464 | "friends": [ 465 | { 466 | "id": 0, 467 | "name": "Cleo Pierce" 468 | }, 469 | { 470 | "id": 1, 471 | "name": "Rachelle Vargas" 472 | }, 473 | { 474 | "id": 2, 475 | "name": "Alma Sears" 476 | } 477 | ], 478 | "greeting": "Hello, Dale Wong! 
You have 1 unread messages.", 479 | "favoriteFruit": "apple" 480 | }, 481 | { 482 | "_id": "63c9578a98c8bcde26ce05a6", 483 | "index": 11, 484 | "guid": "8ada4b0a-9a21-4bf1-b266-f7c8e63c1287", 485 | "isActive": false, 486 | "balance": "$1,560.83", 487 | "picture": "http://placehold.it/32x32", 488 | "age": 26, 489 | "eyeColor": "brown", 490 | "name": "Dyer Lucas", 491 | "gender": "male", 492 | "company": "STRALUM", 493 | "email": "dyerlucas@stralum.com", 494 | "phone": "+1 (831) 515-3493", 495 | "address": "453 Sheffield Avenue, Neibert, South Dakota, 8896", 496 | "about": "Reprehenderit Lorem Lorem nisi in excepteur adipisicing tempor reprehenderit ea nostrud. Deserunt et Lorem tempor eiusmod elit do. Sunt excepteur proident occaecat consectetur ad elit. Nisi ex commodo mollit incididunt nisi voluptate voluptate. Ex ipsum exercitation qui consequat. Culpa tempor laboris minim reprehenderit Lorem in nisi. Laborum minim sit commodo exercitation ullamco voluptate eu deserunt mollit nisi laborum.\r\n", 497 | "registered": "2018-07-28T12:15:02 -08:00", 498 | "latitude": -83.672454, 499 | "longitude": -27.460246, 500 | "tags": [ 501 | "officia", 502 | "excepteur", 503 | "anim", 504 | "ad", 505 | "excepteur", 506 | "ipsum", 507 | "incididunt" 508 | ], 509 | "friends": [ 510 | { 511 | "id": 0, 512 | "name": "Frederick Lancaster" 513 | }, 514 | { 515 | "id": 1, 516 | "name": "Opal Kramer" 517 | }, 518 | { 519 | "id": 2, 520 | "name": "Gray Mcfarland" 521 | } 522 | ], 523 | "greeting": "Hello, Dyer Lucas! 
You have 4 unread messages.", 524 | "favoriteFruit": "banana" 525 | }, 526 | { 527 | "_id": "63c9578ab03cbf8b9f8cbca3", 528 | "index": 12, 529 | "guid": "429a929a-fe4a-457a-9ab3-6f2e5277dfaa", 530 | "isActive": false, 531 | "balance": "$2,923.50", 532 | "picture": "http://placehold.it/32x32", 533 | "age": 30, 534 | "eyeColor": "green", 535 | "name": "Erna Gates", 536 | "gender": "female", 537 | "company": "QOT", 538 | "email": "ernagates@qot.com", 539 | "phone": "+1 (898) 593-3412", 540 | "address": "768 Schenck Place, Norwood, Ohio, 1196", 541 | "about": "Dolor pariatur irure non eiusmod ut nostrud enim ad proident. Deserunt voluptate nulla dolore id duis officia ea cupidatat. Deserunt enim consequat do proident voluptate ad quis laborum nulla.\r\n", 542 | "registered": "2021-05-29T05:31:12 -08:00", 543 | "latitude": -21.708898, 544 | "longitude": 119.615038, 545 | "tags": [ 546 | "amet", 547 | "eiusmod", 548 | "sit", 549 | "id", 550 | "voluptate", 551 | "aliquip", 552 | "incididunt" 553 | ], 554 | "friends": [ 555 | { 556 | "id": 0, 557 | "name": "Selena Sosa" 558 | }, 559 | { 560 | "id": 1, 561 | "name": "Erin Hicks" 562 | }, 563 | { 564 | "id": 2, 565 | "name": "Herring Greer" 566 | } 567 | ], 568 | "greeting": "Hello, Erna Gates! You have 8 unread messages.", 569 | "favoriteFruit": "strawberry" 570 | }, 571 | { 572 | "_id": "63c9578a03fe0bf0b22ed523", 573 | "index": 13, 574 | "guid": "b070c5f1-f422-4bbc-b040-338edd8d90ea", 575 | "isActive": false, 576 | "balance": "$1,452.04", 577 | "picture": "http://placehold.it/32x32", 578 | "age": 26, 579 | "eyeColor": "green", 580 | "name": "Parsons Harmon", 581 | "gender": "male", 582 | "company": "BEDLAM", 583 | "email": "parsonsharmon@bedlam.com", 584 | "phone": "+1 (809) 491-3640", 585 | "address": "138 Howard Alley, Davenport, Virgin Islands, 5409", 586 | "about": "Consectetur officia magna officia consectetur eiusmod quis excepteur anim. Id officia et enim non. 
Lorem reprehenderit adipisicing sunt laboris.\r\n", 587 | "registered": "2014-12-14T01:09:36 -08:00", 588 | "latitude": -39.858511, 589 | "longitude": -11.999853, 590 | "tags": [ 591 | "esse", 592 | "eiusmod", 593 | "excepteur", 594 | "minim", 595 | "sit", 596 | "anim", 597 | "dolor" 598 | ], 599 | "friends": [ 600 | { 601 | "id": 0, 602 | "name": "Peters Watson" 603 | }, 604 | { 605 | "id": 1, 606 | "name": "Lisa Casey" 607 | }, 608 | { 609 | "id": 2, 610 | "name": "Dena Campos" 611 | } 612 | ], 613 | "greeting": "Hello, Parsons Harmon! You have 4 unread messages.", 614 | "favoriteFruit": "strawberry" 615 | }, 616 | { 617 | "_id": "63c9578a51264a817e3a7c9c", 618 | "index": 14, 619 | "guid": "50aedbbb-8c5c-4eba-8693-d0c51becb51f", 620 | "isActive": true, 621 | "balance": "$3,917.50", 622 | "picture": "http://placehold.it/32x32", 623 | "age": 39, 624 | "eyeColor": "green", 625 | "name": "Bowen Bryant", 626 | "gender": "male", 627 | "company": "NEPTIDE", 628 | "email": "bowenbryant@neptide.com", 629 | "phone": "+1 (844) 453-3051", 630 | "address": "839 Sunnyside Avenue, Onton, Utah, 9365", 631 | "about": "Aliqua labore mollit ad incididunt. Exercitation consectetur mollit exercitation consectetur ullamco. Sit est elit officia elit non sunt non velit excepteur. Excepteur ut ullamco deserunt aliqua tempor. Do nulla sit deserunt nulla consequat officia occaecat commodo.\r\n", 632 | "registered": "2014-05-24T03:58:42 -08:00", 633 | "latitude": -71.385638, 634 | "longitude": -11.961434, 635 | "tags": ["excepteur", "ut", "aliqua", "amet", "id", "anim", "officia"], 636 | "friends": [ 637 | { 638 | "id": 0, 639 | "name": "Carey Petersen" 640 | }, 641 | { 642 | "id": 1, 643 | "name": "Rose Vazquez" 644 | }, 645 | { 646 | "id": 2, 647 | "name": "Cleveland Farrell" 648 | } 649 | ], 650 | "greeting": "Hello, Bowen Bryant! 
You have 9 unread messages.", 651 | "favoriteFruit": "banana" 652 | }, 653 | { 654 | "_id": "63c9578ae17387e3cb7a0025", 655 | "index": 15, 656 | "guid": "c6efceb6-e273-42c7-9bb8-509e743b2781", 657 | "isActive": true, 658 | "balance": "$1,404.43", 659 | "picture": "http://placehold.it/32x32", 660 | "age": 31, 661 | "eyeColor": "green", 662 | "name": "Rhoda Schroeder", 663 | "gender": "female", 664 | "company": "LYRICHORD", 665 | "email": "rhodaschroeder@lyrichord.com", 666 | "phone": "+1 (828) 578-3599", 667 | "address": "404 Jardine Place, Martinez, Nebraska, 6994", 668 | "about": "Elit adipisicing fugiat Lorem exercitation velit. Ea officia magna fugiat minim irure aliqua dolore. Fugiat velit aute veniam eiusmod mollit irure laboris voluptate laborum elit duis magna anim laborum. Sunt adipisicing non deserunt aute sit laboris do laborum.\r\n", 669 | "registered": "2016-02-13T07:52:18 -08:00", 670 | "latitude": -75.020879, 671 | "longitude": -168.858184, 672 | "tags": ["enim", "et", "labore", "sit", "labore", "non", "quis"], 673 | "friends": [ 674 | { 675 | "id": 0, 676 | "name": "Luisa Bond" 677 | }, 678 | { 679 | "id": 1, 680 | "name": "Natalia Poole" 681 | }, 682 | { 683 | "id": 2, 684 | "name": "Aimee Kent" 685 | } 686 | ], 687 | "greeting": "Hello, Rhoda Schroeder! You have 7 unread messages.", 688 | "favoriteFruit": "strawberry" 689 | }, 690 | { 691 | "_id": "63c9578afd8ecd6db50b5298", 692 | "index": 16, 693 | "guid": "968bcaa6-c507-46bd-a264-d98ce8fd1de9", 694 | "isActive": true, 695 | "balance": "$2,450.63", 696 | "picture": "http://placehold.it/32x32", 697 | "age": 36, 698 | "eyeColor": "green", 699 | "name": "June Dickson", 700 | "gender": "female", 701 | "company": "ZBOO", 702 | "email": "junedickson@zboo.com", 703 | "phone": "+1 (909) 593-2010", 704 | "address": "399 Stryker Street, Cassel, West Virginia, 8144", 705 | "about": "Eu pariatur cillum est commodo mollit consequat veniam magna quis adipisicing fugiat pariatur deserunt. 
Magna laboris consequat reprehenderit ullamco consectetur sint laborum nulla aute occaecat reprehenderit. Aute amet voluptate sint non consectetur amet id occaecat sit. Cupidatat irure fugiat anim Lorem. Officia laborum dolore esse consequat consectetur officia minim enim minim Lorem enim velit. Lorem culpa exercitation do ea Lorem cillum exercitation commodo. Ea qui velit Lorem aliqua anim aliquip nulla cupidatat eiusmod dolor.\r\n", 706 | "registered": "2016-01-17T10:35:25 -08:00", 707 | "latitude": -17.758453, 708 | "longitude": 15.354588, 709 | "tags": [ 710 | "mollit", 711 | "exercitation", 712 | "dolor", 713 | "incididunt", 714 | "occaecat", 715 | "amet", 716 | "reprehenderit" 717 | ], 718 | "friends": [ 719 | { 720 | "id": 0, 721 | "name": "Hyde Foreman" 722 | }, 723 | { 724 | "id": 1, 725 | "name": "Bobbi Lowery" 726 | }, 727 | { 728 | "id": 2, 729 | "name": "Malinda Larsen" 730 | } 731 | ], 732 | "greeting": "Hello, June Dickson! You have 5 unread messages.", 733 | "favoriteFruit": "strawberry" 734 | }, 735 | { 736 | "_id": "63c9578a4bf5cbb014fa888e", 737 | "index": 17, 738 | "guid": "4235fec4-43c7-4d12-866e-c9ecfc1b1e76", 739 | "isActive": false, 740 | "balance": "$3,313.77", 741 | "picture": "http://placehold.it/32x32", 742 | "age": 35, 743 | "eyeColor": "green", 744 | "name": "Jennings Solis", 745 | "gender": "male", 746 | "company": "DECRATEX", 747 | "email": "jenningssolis@decratex.com", 748 | "phone": "+1 (927) 495-3980", 749 | "address": "776 Huntington Street, Macdona, Idaho, 4433", 750 | "about": "Pariatur voluptate ut ex occaecat voluptate dolor culpa ex nisi laboris magna. Anim magna in sit anim laborum esse non et duis laboris voluptate in. Sit irure enim enim magna velit esse non aute mollit nisi nisi. 
Laboris sit eiusmod enim velit occaecat Lorem aliquip anim pariatur commodo consectetur.\r\n", 751 | "registered": "2017-05-11T10:22:58 -08:00", 752 | "latitude": -53.347395, 753 | "longitude": -170.727909, 754 | "tags": [ 755 | "aute", 756 | "voluptate", 757 | "consectetur", 758 | "sint", 759 | "do", 760 | "ea", 761 | "exercitation" 762 | ], 763 | "friends": [ 764 | { 765 | "id": 0, 766 | "name": "Kari Rich" 767 | }, 768 | { 769 | "id": 1, 770 | "name": "Shannon Chen" 771 | }, 772 | { 773 | "id": 2, 774 | "name": "Taylor Bonner" 775 | } 776 | ], 777 | "greeting": "Hello, Jennings Solis! You have 5 unread messages.", 778 | "favoriteFruit": "apple" 779 | }, 780 | { 781 | "_id": "63c9578a43c335850849a804", 782 | "index": 18, 783 | "guid": "d71665f1-8f29-47a3-a605-a4941cffb04a", 784 | "isActive": false, 785 | "balance": "$1,682.67", 786 | "picture": "http://placehold.it/32x32", 787 | "age": 32, 788 | "eyeColor": "blue", 789 | "name": "Franklin Brennan", 790 | "gender": "male", 791 | "company": "ZILLACON", 792 | "email": "franklinbrennan@zillacon.com", 793 | "phone": "+1 (805) 572-3031", 794 | "address": "536 Kathleen Court, Wiscon, Vermont, 449", 795 | "about": "Elit aliqua ullamco fugiat reprehenderit sit do. Adipisicing nisi dolore irure deserunt ipsum excepteur anim laborum quis. Ipsum sit dolore cillum proident est ullamco sint dolor id sint officia ullamco reprehenderit pariatur. Lorem duis ut dolor labore voluptate. Et duis consequat elit laboris. 
Do cupidatat commodo ullamco et duis laboris consequat.\r\n", 796 | "registered": "2019-02-28T12:47:33 -08:00", 797 | "latitude": -16.205568, 798 | "longitude": -54.527567, 799 | "tags": [ 800 | "labore", 801 | "anim", 802 | "deserunt", 803 | "minim", 804 | "nostrud", 805 | "veniam", 806 | "elit" 807 | ], 808 | "friends": [ 809 | { 810 | "id": 0, 811 | "name": "Mcknight Bryan" 812 | }, 813 | { 814 | "id": 1, 815 | "name": "Albert Mccullough" 816 | }, 817 | { 818 | "id": 2, 819 | "name": "Molly Crane" 820 | } 821 | ], 822 | "greeting": "Hello, Franklin Brennan! You have 1 unread messages.", 823 | "favoriteFruit": "banana" 824 | }, 825 | { 826 | "_id": "63c9578a6adbb75b4c6a577f", 827 | "index": 19, 828 | "guid": "c0cd88d6-67e0-4eec-9a84-8b075fe6ef37", 829 | "isActive": true, 830 | "balance": "$1,806.14", 831 | "picture": "http://placehold.it/32x32", 832 | "age": 23, 833 | "eyeColor": "brown", 834 | "name": "Loretta Hodge", 835 | "gender": "female", 836 | "company": "REMOLD", 837 | "email": "lorettahodge@remold.com", 838 | "phone": "+1 (808) 502-2244", 839 | "address": "727 Woodbine Street, Templeton, Delaware, 4983", 840 | "about": "Mollit consequat reprehenderit est ad labore est sit ex. Quis consequat sint nulla aliqua sint officia Lorem nostrud minim culpa ullamco. Esse est irure ex do sint velit irure eu sit pariatur laborum aliquip qui. Eu est occaecat dolor aliqua ea ea reprehenderit ex consectetur exercitation ex mollit enim. Reprehenderit et laborum sint aliquip. 
Reprehenderit ea quis aliqua deserunt ea reprehenderit duis amet pariatur velit excepteur elit.\r\n", 841 | "registered": "2020-03-21T03:39:12 -08:00", 842 | "latitude": -46.480465, 843 | "longitude": -65.829932, 844 | "tags": ["non", "velit", "proident", "quis", "irure", "in", "commodo"], 845 | "friends": [ 846 | { 847 | "id": 0, 848 | "name": "Wood Morris" 849 | }, 850 | { 851 | "id": 1, 852 | "name": "Ratliff Shaw" 853 | }, 854 | { 855 | "id": 2, 856 | "name": "Shawna Dotson" 857 | } 858 | ], 859 | "greeting": "Hello, Loretta Hodge! You have 1 unread messages.", 860 | "favoriteFruit": "banana" 861 | }, 862 | { 863 | "_id": "63c9578ac7d03aed3e94ca1f", 864 | "index": 20, 865 | "guid": "5a79b0aa-709c-46b2-9a0e-4b8270882bb8", 866 | "isActive": true, 867 | "balance": "$3,101.11", 868 | "picture": "http://placehold.it/32x32", 869 | "age": 22, 870 | "eyeColor": "green", 871 | "name": "Mabel Harper", 872 | "gender": "female", 873 | "company": "ZENTURY", 874 | "email": "mabelharper@zentury.com", 875 | "phone": "+1 (997) 454-2797", 876 | "address": "202 Brooklyn Avenue, Lupton, Colorado, 6735", 877 | "about": "Deserunt fugiat voluptate et non exercitation. Tempor eu irure elit in quis et dolore aliqua dolore commodo quis adipisicing. Occaecat consequat pariatur excepteur eu aliqua sit excepteur voluptate. Ex velit ad pariatur nulla reprehenderit aliquip nostrud ex nulla adipisicing duis. Cillum Lorem amet sit irure anim aliqua amet elit. Ad enim voluptate ullamco anim est commodo cillum.\r\n", 878 | "registered": "2017-06-11T01:48:51 -08:00", 879 | "latitude": -12.092468, 880 | "longitude": 100.707155, 881 | "tags": ["aute", "duis", "enim", "reprehenderit", "qui", "in", "non"], 882 | "friends": [ 883 | { 884 | "id": 0, 885 | "name": "Coffey Buckley" 886 | }, 887 | { 888 | "id": 1, 889 | "name": "Estela Dodson" 890 | }, 891 | { 892 | "id": 2, 893 | "name": "Earline Knowles" 894 | } 895 | ], 896 | "greeting": "Hello, Mabel Harper! 
You have 6 unread messages.", 897 | "favoriteFruit": "apple" 898 | }, 899 | { 900 | "_id": "63c9578ae0e626237a07ec47", 901 | "index": 21, 902 | "guid": "e1a37d36-3287-4a3c-bd09-690caabc765a", 903 | "isActive": false, 904 | "balance": "$1,117.75", 905 | "picture": "http://placehold.it/32x32", 906 | "age": 24, 907 | "eyeColor": "green", 908 | "name": "Flossie Waller", 909 | "gender": "female", 910 | "company": "INJOY", 911 | "email": "flossiewaller@injoy.com", 912 | "phone": "+1 (832) 565-2119", 913 | "address": "574 Dunne Place, Bainbridge, Maine, 8653", 914 | "about": "Dolor cupidatat fugiat do dolor enim nisi. Fugiat deserunt esse id deserunt dolor consequat voluptate deserunt dolor ad velit. Id qui veniam dolor aliquip. Ad est Lorem eu ipsum Lorem duis exercitation id qui est ea dolore tempor consectetur. Ut voluptate irure cupidatat deserunt. Non id elit non ipsum consequat sunt irure consequat.\r\n", 915 | "registered": "2019-12-29T04:04:48 -08:00", 916 | "latitude": -9.107839, 917 | "longitude": 146.051227, 918 | "tags": [ 919 | "aute", 920 | "dolor", 921 | "adipisicing", 922 | "nisi", 923 | "do", 924 | "enim", 925 | "tempor" 926 | ], 927 | "friends": [ 928 | { 929 | "id": 0, 930 | "name": "Montoya Sellers" 931 | }, 932 | { 933 | "id": 1, 934 | "name": "Erica Huber" 935 | }, 936 | { 937 | "id": 2, 938 | "name": "Carson Robinson" 939 | } 940 | ], 941 | "greeting": "Hello, Flossie Waller! 
You have 10 unread messages.", 942 | "favoriteFruit": "banana" 943 | }, 944 | { 945 | "_id": "63c9578aac465c2ec235700d", 946 | "index": 22, 947 | "guid": "7a48d076-b3f5-437e-bd4f-22c5bc74cf42", 948 | "isActive": true, 949 | "balance": "$2,606.33", 950 | "picture": "http://placehold.it/32x32", 951 | "age": 23, 952 | "eyeColor": "blue", 953 | "name": "Jenifer Parks", 954 | "gender": "female", 955 | "company": "PREMIANT", 956 | "email": "jeniferparks@premiant.com", 957 | "phone": "+1 (874) 548-3007", 958 | "address": "502 Court Square, Gratton, South Carolina, 6343", 959 | "about": "Occaecat non ipsum reprehenderit occaecat id dolore laboris exercitation reprehenderit in do. Cillum veniam non id sunt eu ad sit duis non. Excepteur ut anim tempor sint amet labore amet sit esse sunt ad ut. Id esse pariatur consectetur labore irure mollit elit dolor aliqua id. Cillum ipsum excepteur nulla magna irure. Duis eu mollit sunt ex ullamco.\r\n", 960 | "registered": "2014-11-04T05:37:38 -08:00", 961 | "latitude": -39.803216, 962 | "longitude": -60.696759, 963 | "tags": ["nisi", "proident", "ut", "magna", "esse", "id", "pariatur"], 964 | "friends": [ 965 | { 966 | "id": 0, 967 | "name": "Carmela Sexton" 968 | }, 969 | { 970 | "id": 1, 971 | "name": "Janice Everett" 972 | }, 973 | { 974 | "id": 2, 975 | "name": "Boyer Reynolds" 976 | } 977 | ], 978 | "greeting": "Hello, Jenifer Parks! 
You have 1 unread messages.", 979 | "favoriteFruit": "banana" 980 | }, 981 | { 982 | "_id": "63c9578a8c77561e5250d1b8", 983 | "index": 23, 984 | "guid": "72848ee9-9e9d-44ed-be71-22e52941ba0c", 985 | "isActive": false, 986 | "balance": "$2,823.21", 987 | "picture": "http://placehold.it/32x32", 988 | "age": 39, 989 | "eyeColor": "brown", 990 | "name": "Rosalinda Fischer", 991 | "gender": "female", 992 | "company": "PHEAST", 993 | "email": "rosalindafischer@pheast.com", 994 | "phone": "+1 (854) 571-2786", 995 | "address": "101 Logan Street, Nettie, Washington, 8733", 996 | "about": "Enim ea aliqua consectetur velit deserunt aliquip aliquip nostrud. Deserunt voluptate eiusmod elit aute cupidatat velit dolor incididunt incididunt aliqua ullamco id ex. Laborum non reprehenderit sit tempor anim. Quis nulla qui do esse magna fugiat qui magna velit veniam. Eu consectetur elit nisi esse irure duis aute occaecat ex sit officia labore nulla.\r\n", 997 | "registered": "2020-11-24T09:43:01 -08:00", 998 | "latitude": -62.638393, 999 | "longitude": 117.764048, 1000 | "tags": [ 1001 | "dolor", 1002 | "eiusmod", 1003 | "eiusmod", 1004 | "ea", 1005 | "adipisicing", 1006 | "fugiat", 1007 | "fugiat" 1008 | ], 1009 | "friends": [ 1010 | { 1011 | "id": 0, 1012 | "name": "Love Frost" 1013 | }, 1014 | { 1015 | "id": 1, 1016 | "name": "Rojas Ward" 1017 | }, 1018 | { 1019 | "id": 2, 1020 | "name": "Ochoa Hopkins" 1021 | } 1022 | ], 1023 | "greeting": "Hello, Rosalinda Fischer! 
You have 6 unread messages.", 1024 | "favoriteFruit": "apple" 1025 | }, 1026 | { 1027 | "_id": "63c9578af8ffc65c4408e69c", 1028 | "index": 24, 1029 | "guid": "7f12390d-4c26-4614-9535-bc850aa48d24", 1030 | "isActive": true, 1031 | "balance": "$2,834.36", 1032 | "picture": "http://placehold.it/32x32", 1033 | "age": 40, 1034 | "eyeColor": "blue", 1035 | "name": "Joanna Woods", 1036 | "gender": "female", 1037 | "company": "MAXIMIND", 1038 | "email": "joannawoods@maximind.com", 1039 | "phone": "+1 (882) 518-2877", 1040 | "address": "995 Bushwick Avenue, Harold, Guam, 2658", 1041 | "about": "Occaecat cillum laborum adipisicing anim mollit nostrud nulla aliqua exercitation elit veniam cillum eiusmod. Aliquip dolore in Lorem et ea occaecat fugiat sint in. Laboris qui do ullamco sint enim consectetur ad dolor excepteur fugiat. Elit pariatur Lorem ullamco dolor sint laboris esse esse ex deserunt magna nisi fugiat non. Labore mollit ex occaecat sint ex pariatur exercitation cillum aliqua. Adipisicing deserunt anim officia est consectetur fugiat duis sint ullamco commodo nostrud reprehenderit adipisicing.\r\n", 1042 | "registered": "2016-08-05T12:13:58 -08:00", 1043 | "latitude": 32.580919, 1044 | "longitude": 166.491111, 1045 | "tags": [ 1046 | "pariatur", 1047 | "ea", 1048 | "laborum", 1049 | "adipisicing", 1050 | "est", 1051 | "mollit", 1052 | "ea" 1053 | ], 1054 | "friends": [ 1055 | { 1056 | "id": 0, 1057 | "name": "Whitehead Stuart" 1058 | }, 1059 | { 1060 | "id": 1, 1061 | "name": "Hayden Holder" 1062 | }, 1063 | { 1064 | "id": 2, 1065 | "name": "Sheri Flores" 1066 | } 1067 | ], 1068 | "greeting": "Hello, Joanna Woods! 
You have 5 unread messages.", 1069 | "favoriteFruit": "strawberry" 1070 | }, 1071 | { 1072 | "_id": "63c9578af66699eb05c3e750", 1073 | "index": 25, 1074 | "guid": "2278abb2-063a-4c46-9572-d134adf7ad81", 1075 | "isActive": false, 1076 | "balance": "$3,133.38", 1077 | "picture": "http://placehold.it/32x32", 1078 | "age": 31, 1079 | "eyeColor": "blue", 1080 | "name": "Tanner Price", 1081 | "gender": "male", 1082 | "company": "STEELFAB", 1083 | "email": "tannerprice@steelfab.com", 1084 | "phone": "+1 (998) 487-2116", 1085 | "address": "982 Oak Street, Caln, Connecticut, 6371", 1086 | "about": "Dolor incididunt consectetur dolore cupidatat adipisicing nulla excepteur ut irure ipsum aliquip id eiusmod sunt. Nisi ut deserunt aliquip occaecat id esse reprehenderit qui. Officia fugiat enim aliqua id exercitation pariatur Lorem ex amet occaecat deserunt eiusmod tempor veniam. Quis cupidatat nisi excepteur nulla id. Culpa nostrud culpa velit cupidatat do enim ipsum eu. Commodo aliquip adipisicing pariatur cillum. Anim voluptate do culpa ipsum nulla nisi labore proident.\r\n", 1087 | "registered": "2014-07-15T09:58:33 -08:00", 1088 | "latitude": 62.267142, 1089 | "longitude": -58.464797, 1090 | "tags": [ 1091 | "anim", 1092 | "est", 1093 | "quis", 1094 | "veniam", 1095 | "consectetur", 1096 | "deserunt", 1097 | "aute" 1098 | ], 1099 | "friends": [ 1100 | { 1101 | "id": 0, 1102 | "name": "Darlene Scott" 1103 | }, 1104 | { 1105 | "id": 1, 1106 | "name": "Richards Cummings" 1107 | }, 1108 | { 1109 | "id": 2, 1110 | "name": "Lauri Clements" 1111 | } 1112 | ], 1113 | "greeting": "Hello, Tanner Price! 
You have 2 unread messages.", 1114 | "favoriteFruit": "apple" 1115 | }, 1116 | { 1117 | "_id": "63c9578adba0ca50f66c8ed0", 1118 | "index": 26, 1119 | "guid": "50cfc2f4-3c28-4047-9392-d69a4b2a190a", 1120 | "isActive": true, 1121 | "balance": "$3,341.45", 1122 | "picture": "http://placehold.it/32x32", 1123 | "age": 35, 1124 | "eyeColor": "blue", 1125 | "name": "Tyson Dennis", 1126 | "gender": "male", 1127 | "company": "BRAINCLIP", 1128 | "email": "tysondennis@brainclip.com", 1129 | "phone": "+1 (907) 491-3143", 1130 | "address": "187 Forest Place, Gloucester, Oregon, 4062", 1131 | "about": "Ullamco nisi aute et enim labore incididunt. Esse excepteur aliqua deserunt ad velit mollit nostrud ad. Elit in ipsum do officia laboris ea cillum enim.\r\n", 1132 | "registered": "2017-02-08T02:57:12 -08:00", 1133 | "latitude": -79.201319, 1134 | "longitude": -39.120717, 1135 | "tags": [ 1136 | "occaecat", 1137 | "labore", 1138 | "labore", 1139 | "excepteur", 1140 | "laboris", 1141 | "voluptate", 1142 | "enim" 1143 | ], 1144 | "friends": [ 1145 | { 1146 | "id": 0, 1147 | "name": "Hawkins Aguirre" 1148 | }, 1149 | { 1150 | "id": 1, 1151 | "name": "House Prince" 1152 | }, 1153 | { 1154 | "id": 2, 1155 | "name": "Noreen Fields" 1156 | } 1157 | ], 1158 | "greeting": "Hello, Tyson Dennis! You have 7 unread messages.", 1159 | "favoriteFruit": "strawberry" 1160 | }, 1161 | { 1162 | "_id": "63c9578af92ada554bb560da", 1163 | "index": 27, 1164 | "guid": "4c6532c8-d83c-4208-b210-d84667c064f7", 1165 | "isActive": false, 1166 | "balance": "$2,229.05", 1167 | "picture": "http://placehold.it/32x32", 1168 | "age": 20, 1169 | "eyeColor": "blue", 1170 | "name": "Alicia Reese", 1171 | "gender": "female", 1172 | "company": "ZOINAGE", 1173 | "email": "aliciareese@zoinage.com", 1174 | "phone": "+1 (988) 435-2337", 1175 | "address": "768 Madison Street, Islandia, Virginia, 1490", 1176 | "about": "Sunt ea eu ut est. Deserunt Lorem quis in ut consectetur Lorem voluptate dolore esse ullamco. 
Enim adipisicing mollit ad cupidatat tempor eiusmod esse.\r\n", 1177 | "registered": "2019-11-29T09:26:08 -08:00", 1178 | "latitude": -46.632506, 1179 | "longitude": -61.376247, 1180 | "tags": ["culpa", "enim", "veniam", "id", "sint", "sit", "deserunt"], 1181 | "friends": [ 1182 | { 1183 | "id": 0, 1184 | "name": "Collins Zamora" 1185 | }, 1186 | { 1187 | "id": 1, 1188 | "name": "Reynolds Hale" 1189 | }, 1190 | { 1191 | "id": 2, 1192 | "name": "Angelia Whitley" 1193 | } 1194 | ], 1195 | "greeting": "Hello, Alicia Reese! You have 4 unread messages.", 1196 | "favoriteFruit": "strawberry" 1197 | }, 1198 | { 1199 | "_id": "63c9578af24b967ade207cbc", 1200 | "index": 28, 1201 | "guid": "c3a7c11c-d8a6-4a8a-bb77-18c00f04a35c", 1202 | "isActive": true, 1203 | "balance": "$3,339.25", 1204 | "picture": "http://placehold.it/32x32", 1205 | "age": 34, 1206 | "eyeColor": "green", 1207 | "name": "Juarez Calderon", 1208 | "gender": "male", 1209 | "company": "GORGANIC", 1210 | "email": "juarezcalderon@gorganic.com", 1211 | "phone": "+1 (809) 428-2897", 1212 | "address": "633 Nolans Lane, Weogufka, North Dakota, 7000", 1213 | "about": "Nostrud adipisicing dolor deserunt duis exercitation sunt amet laborum. Reprehenderit velit magna cupidatat cupidatat laborum Lorem id labore. Qui magna nulla cupidatat dolor aliquip.\r\n", 1214 | "registered": "2015-12-28T08:08:40 -08:00", 1215 | "latitude": 2.497795, 1216 | "longitude": 135.924155, 1217 | "tags": ["dolore", "amet", "enim", "ex", "do", "irure", "in"], 1218 | "friends": [ 1219 | { 1220 | "id": 0, 1221 | "name": "Summer Gould" 1222 | }, 1223 | { 1224 | "id": 1, 1225 | "name": "Joyner Stokes" 1226 | }, 1227 | { 1228 | "id": 2, 1229 | "name": "Delores Powell" 1230 | } 1231 | ], 1232 | "greeting": "Hello, Juarez Calderon! 
You have 6 unread messages.", 1233 | "favoriteFruit": "strawberry" 1234 | }, 1235 | { 1236 | "_id": "63c9578a658dbe085d1aa24e", 1237 | "index": 29, 1238 | "guid": "2e291d80-2fde-4a26-a15f-430162af2431", 1239 | "isActive": true, 1240 | "balance": "$2,298.49", 1241 | "picture": "http://placehold.it/32x32", 1242 | "age": 28, 1243 | "eyeColor": "blue", 1244 | "name": "Lucinda Henry", 1245 | "gender": "female", 1246 | "company": "UNCORP", 1247 | "email": "lucindahenry@uncorp.com", 1248 | "phone": "+1 (809) 413-3598", 1249 | "address": "326 Norwood Avenue, Eagleville, Mississippi, 2047", 1250 | "about": "Culpa commodo anim irure quis nostrud officia sit ex eiusmod cillum dolore. Est velit nisi deserunt ipsum cupidatat. Et id deserunt amet sunt proident sunt deserunt. Occaecat veniam cillum cillum non velit duis non. Anim Lorem elit occaecat occaecat Lorem Lorem aute duis. Excepteur labore sunt quis deserunt proident mollit velit aliquip veniam tempor.\r\n", 1251 | "registered": "2020-06-06T04:39:49 -08:00", 1252 | "latitude": 60.554464, 1253 | "longitude": 40.752453, 1254 | "tags": ["et", "nostrud", "et", "velit", "duis", "aute", "culpa"], 1255 | "friends": [ 1256 | { 1257 | "id": 0, 1258 | "name": "Concetta Rush" 1259 | }, 1260 | { 1261 | "id": 1, 1262 | "name": "Candace Baxter" 1263 | }, 1264 | { 1265 | "id": 2, 1266 | "name": "Le Fitzpatrick" 1267 | } 1268 | ], 1269 | "greeting": "Hello, Lucinda Henry! 
You have 4 unread messages.", 1270 | "favoriteFruit": "banana" 1271 | }, 1272 | { 1273 | "_id": "63c9578a40a2d30313d90cd3", 1274 | "index": 30, 1275 | "guid": "0642c6de-b94b-40e9-a494-c7e4d9ea2645", 1276 | "isActive": true, 1277 | "balance": "$1,865.45", 1278 | "picture": "http://placehold.it/32x32", 1279 | "age": 40, 1280 | "eyeColor": "brown", 1281 | "name": "Frazier Dixon", 1282 | "gender": "male", 1283 | "company": "SENTIA", 1284 | "email": "frazierdixon@sentia.com", 1285 | "phone": "+1 (830) 511-2481", 1286 | "address": "215 Greenpoint Avenue, Darrtown, Tennessee, 4553", 1287 | "about": "Anim quis ut minim id nulla est laboris ipsum. Cillum elit nostrud exercitation exercitation. Aliqua duis anim culpa dolor proident dolor enim cupidatat Lorem eu. Amet dolore in eiusmod ad quis occaecat tempor ad laborum. Consequat ex aute laborum aute exercitation proident ea fugiat pariatur ut magna anim culpa anim. Sit occaecat elit sint ea nostrud. Non sunt velit nisi ullamco dolor excepteur adipisicing.\r\n", 1288 | "registered": "2020-05-03T07:27:41 -08:00", 1289 | "latitude": -25.786738, 1290 | "longitude": -98.082427, 1291 | "tags": ["eu", "aute", "velit", "reprehenderit", "ex", "eu", "est"], 1292 | "friends": [ 1293 | { 1294 | "id": 0, 1295 | "name": "Hurley Ratliff" 1296 | }, 1297 | { 1298 | "id": 1, 1299 | "name": "Pearson Best" 1300 | }, 1301 | { 1302 | "id": 2, 1303 | "name": "Evelyn Massey" 1304 | } 1305 | ], 1306 | "greeting": "Hello, Frazier Dixon! 
You have 7 unread messages.", 1307 | "favoriteFruit": "banana" 1308 | }, 1309 | { 1310 | "_id": "63c9578a1156814c8f094d10", 1311 | "index": 31, 1312 | "guid": "25f8de49-e35e-4beb-a612-bfeef3e607b4", 1313 | "isActive": false, 1314 | "balance": "$3,877.75", 1315 | "picture": "http://placehold.it/32x32", 1316 | "age": 24, 1317 | "eyeColor": "brown", 1318 | "name": "Freda Bowen", 1319 | "gender": "female", 1320 | "company": "AUTOGRATE", 1321 | "email": "fredabowen@autograte.com", 1322 | "phone": "+1 (833) 497-2914", 1323 | "address": "312 Box Street, Fulford, Kansas, 1193", 1324 | "about": "Deserunt eiusmod culpa eiusmod esse mollit. Sit id deserunt ut minim sint laboris culpa sit consectetur ullamco veniam aliquip sint. Minim cillum veniam est sit magna enim.\r\n", 1325 | "registered": "2020-06-08T09:28:42 -08:00", 1326 | "latitude": 31.937198, 1327 | "longitude": 7.328336, 1328 | "tags": [ 1329 | "veniam", 1330 | "qui", 1331 | "reprehenderit", 1332 | "eiusmod", 1333 | "nulla", 1334 | "minim", 1335 | "non" 1336 | ], 1337 | "friends": [ 1338 | { 1339 | "id": 0, 1340 | "name": "Newman Wilder" 1341 | }, 1342 | { 1343 | "id": 1, 1344 | "name": "Marshall Simpson" 1345 | }, 1346 | { 1347 | "id": 2, 1348 | "name": "Walls Rasmussen" 1349 | } 1350 | ], 1351 | "greeting": "Hello, Freda Bowen! You have 3 unread messages.", 1352 | "favoriteFruit": "apple" 1353 | }, 1354 | { 1355 | "_id": "63c9578a74d31cfd180fdebe", 1356 | "index": 32, 1357 | "guid": "8626fe1c-3beb-4e88-8df7-0273a437777c", 1358 | "isActive": false, 1359 | "balance": "$1,014.16", 1360 | "picture": "http://placehold.it/32x32", 1361 | "age": 38, 1362 | "eyeColor": "green", 1363 | "name": "Tessa Branch", 1364 | "gender": "female", 1365 | "company": "IRACK", 1366 | "email": "tessabranch@irack.com", 1367 | "phone": "+1 (811) 477-3879", 1368 | "address": "706 Paerdegat Avenue, Inkerman, Rhode Island, 8475", 1369 | "about": "In fugiat laborum sunt mollit reprehenderit mollit minim fugiat fugiat ipsum. 
Officia labore occaecat excepteur amet nisi labore aute culpa labore magna tempor eiusmod veniam consequat. Aliqua sit ex esse est eu non consectetur veniam mollit. Ullamco ad sit incididunt occaecat exercitation qui veniam ullamco occaecat et fugiat commodo ipsum.\r\n", 1370 | "registered": "2018-02-15T04:26:28 -08:00", 1371 | "latitude": -42.075111, 1372 | "longitude": -138.436955, 1373 | "tags": [ 1374 | "laboris", 1375 | "occaecat", 1376 | "id", 1377 | "dolor", 1378 | "in", 1379 | "sunt", 1380 | "cupidatat" 1381 | ], 1382 | "friends": [ 1383 | { 1384 | "id": 0, 1385 | "name": "Pace Christian" 1386 | }, 1387 | { 1388 | "id": 1, 1389 | "name": "Sparks Beard" 1390 | }, 1391 | { 1392 | "id": 2, 1393 | "name": "Mitzi Torres" 1394 | } 1395 | ], 1396 | "greeting": "Hello, Tessa Branch! You have 6 unread messages.", 1397 | "favoriteFruit": "apple" 1398 | }, 1399 | { 1400 | "_id": "63c9578a112491258b87e28d", 1401 | "index": 33, 1402 | "guid": "bcc9132e-543c-41b4-8537-8d1e4a067925", 1403 | "isActive": false, 1404 | "balance": "$1,573.64", 1405 | "picture": "http://placehold.it/32x32", 1406 | "age": 21, 1407 | "eyeColor": "brown", 1408 | "name": "Ivy Workman", 1409 | "gender": "female", 1410 | "company": "COGENTRY", 1411 | "email": "ivyworkman@cogentry.com", 1412 | "phone": "+1 (842) 552-3027", 1413 | "address": "962 McKibbin Street, Bancroft, Texas, 7872", 1414 | "about": "Culpa laborum nulla minim in ullamco eu reprehenderit id non ut. Velit elit mollit minim occaecat consequat culpa sint sint nisi cillum velit minim sunt. Do eiusmod elit esse id ullamco voluptate elit commodo excepteur aliqua irure id laborum deserunt. Quis eiusmod nulla incididunt deserunt adipisicing proident exercitation velit culpa dolor excepteur laborum. Dolore velit enim velit consectetur officia culpa ipsum reprehenderit. 
Id excepteur reprehenderit cillum duis.\r\n", 1415 | "registered": "2019-12-06T11:42:05 -08:00", 1416 | "latitude": 50.656861, 1417 | "longitude": 94.043607, 1418 | "tags": ["do", "nostrud", "in", "id", "qui", "voluptate", "aute"], 1419 | "friends": [ 1420 | { 1421 | "id": 0, 1422 | "name": "Kara Tyler" 1423 | }, 1424 | { 1425 | "id": 1, 1426 | "name": "Bettie Riley" 1427 | }, 1428 | { 1429 | "id": 2, 1430 | "name": "Kathryn Conner" 1431 | } 1432 | ], 1433 | "greeting": "Hello, Ivy Workman! You have 7 unread messages.", 1434 | "favoriteFruit": "strawberry" 1435 | }, 1436 | { 1437 | "_id": "63c9578adc16e1f8fe8291b0", 1438 | "index": 34, 1439 | "guid": "d3c737d3-ef2e-475d-9093-9f4eeffa771b", 1440 | "isActive": true, 1441 | "balance": "$2,793.77", 1442 | "picture": "http://placehold.it/32x32", 1443 | "age": 36, 1444 | "eyeColor": "brown", 1445 | "name": "Blair York", 1446 | "gender": "male", 1447 | "company": "GEEKMOSIS", 1448 | "email": "blairyork@geekmosis.com", 1449 | "phone": "+1 (950) 401-2988", 1450 | "address": "108 Knapp Street, Nelson, Alabama, 6875", 1451 | "about": "Fugiat commodo elit minim dolore in sit. Dolore nulla minim labore proident. Ut dolore proident aliqua aute qui id duis exercitation eu.\r\n", 1452 | "registered": "2016-11-03T05:59:05 -08:00", 1453 | "latitude": 7.528868, 1454 | "longitude": -100.501605, 1455 | "tags": [ 1456 | "ipsum", 1457 | "cillum", 1458 | "consequat", 1459 | "sunt", 1460 | "sunt", 1461 | "fugiat", 1462 | "consequat" 1463 | ], 1464 | "friends": [ 1465 | { 1466 | "id": 0, 1467 | "name": "Deann Bowman" 1468 | }, 1469 | { 1470 | "id": 1, 1471 | "name": "Sheryl Tate" 1472 | }, 1473 | { 1474 | "id": 2, 1475 | "name": "Barry Santiago" 1476 | } 1477 | ], 1478 | "greeting": "Hello, Blair York! 
You have 7 unread messages.", 1479 | "favoriteFruit": "apple" 1480 | }, 1481 | { 1482 | "_id": "63c9578a5e290362599cc8cd", 1483 | "index": 35, 1484 | "guid": "4a6a0e99-83e4-436b-a5f7-d98f97009202", 1485 | "isActive": false, 1486 | "balance": "$1,684.42", 1487 | "picture": "http://placehold.it/32x32", 1488 | "age": 23, 1489 | "eyeColor": "blue", 1490 | "name": "Ronda Velez", 1491 | "gender": "female", 1492 | "company": "DANJA", 1493 | "email": "rondavelez@danja.com", 1494 | "phone": "+1 (988) 452-3576", 1495 | "address": "895 Veranda Place, Cedarville, North Carolina, 5887", 1496 | "about": "Eiusmod nostrud voluptate nisi mollit. Tempor sunt magna aute ut ex tempor laboris fugiat enim laborum dolore sunt. Est incididunt ullamco enim amet dolore. Enim commodo proident sit culpa. Qui mollit deserunt cillum consequat irure veniam proident irure sit.\r\n", 1497 | "registered": "2014-11-25T01:59:11 -08:00", 1498 | "latitude": -65.597416, 1499 | "longitude": 104.316127, 1500 | "tags": [ 1501 | "reprehenderit", 1502 | "sunt", 1503 | "esse", 1504 | "eu", 1505 | "proident", 1506 | "et", 1507 | "cupidatat" 1508 | ], 1509 | "friends": [ 1510 | { 1511 | "id": 0, 1512 | "name": "Deanne Simon" 1513 | }, 1514 | { 1515 | "id": 1, 1516 | "name": "Alfreda Lindsey" 1517 | }, 1518 | { 1519 | "id": 2, 1520 | "name": "Drake Johnson" 1521 | } 1522 | ], 1523 | "greeting": "Hello, Ronda Velez! 
You have 4 unread messages.", 1524 | "favoriteFruit": "banana" 1525 | } 1526 | ] 1527 | -------------------------------------------------------------------------------- /benchmarks/medium.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id": "63c9575f4598cd9426cca500", 4 | "index": 0, 5 | "guid": "3be39339-19c8-4403-bcca-c85582ed9539", 6 | "isActive": true, 7 | "balance": "$2,231.91", 8 | "picture": "http://placehold.it/32x32", 9 | "age": 38, 10 | "eyeColor": "blue", 11 | "name": "Sparks Wallace", 12 | "gender": "male", 13 | "company": "EXOBLUE", 14 | "email": "sparkswallace@exoblue.com", 15 | "phone": "+1 (808) 535-2717", 16 | "address": "967 Vermont Court, Ernstville, Arizona, 8455", 17 | "about": "Eiusmod quis nisi ea in anim id aute duis reprehenderit nulla id reprehenderit proident occaecat. In qui occaecat et officia. Voluptate nisi in elit adipisicing aliquip non aute consectetur. Deserunt ad ullamco ad voluptate cupidatat sint ea culpa exercitation nulla sit in anim deserunt. Anim cupidatat consequat duis id cupidatat commodo.\r\n", 18 | "registered": "2019-07-27T08:34:51 -08:00", 19 | "latitude": -40.53408, 20 | "longitude": -151.51883, 21 | "tags": ["enim", "commodo", "et", "minim", "tempor", "amet", "ut"], 22 | "friends": [ 23 | { 24 | "id": 0, 25 | "name": "Carolina Crane" 26 | }, 27 | { 28 | "id": 1, 29 | "name": "Aguilar Malone" 30 | }, 31 | { 32 | "id": 2, 33 | "name": "Terri English" 34 | } 35 | ], 36 | "greeting": "Hello, Sparks Wallace! 
You have 1 unread messages.", 37 | "favoriteFruit": "strawberry" 38 | }, 39 | { 40 | "_id": "63c9575fbffd102eafae0482", 41 | "index": 1, 42 | "guid": "fc6cd59a-4603-4074-9a21-1741303ee2de", 43 | "isActive": true, 44 | "balance": "$3,053.02", 45 | "picture": "http://placehold.it/32x32", 46 | "age": 22, 47 | "eyeColor": "blue", 48 | "name": "Bright Walker", 49 | "gender": "male", 50 | "company": "VERTIDE", 51 | "email": "brightwalker@vertide.com", 52 | "phone": "+1 (967) 520-3235", 53 | "address": "909 Lefferts Place, Martell, Northern Mariana Islands, 4028", 54 | "about": "Reprehenderit aute mollit dolore proident cupidatat officia consectetur laborum deserunt sunt anim duis nisi duis. Consectetur nostrud consequat duis sunt Lorem nisi nostrud mollit incididunt. Sunt laborum ullamco culpa commodo exercitation cupidatat sint ex cillum adipisicing laborum. Ea reprehenderit reprehenderit exercitation id in in ad non ea commodo occaecat laborum et quis. Veniam mollit id tempor cillum laborum Lorem duis laboris cillum esse. Tempor deserunt pariatur minim dolore anim labore ea aliqua do minim.\r\n", 55 | "registered": "2020-06-06T01:57:27 -08:00", 56 | "latitude": -17.372144, 57 | "longitude": -133.050188, 58 | "tags": ["aliqua", "laboris", "dolor", "culpa", "irure", "Lorem", "ut"], 59 | "friends": [ 60 | { 61 | "id": 0, 62 | "name": "Latasha Ingram" 63 | }, 64 | { 65 | "id": 1, 66 | "name": "Maura Barrett" 67 | }, 68 | { 69 | "id": 2, 70 | "name": "Fitzgerald Paul" 71 | } 72 | ], 73 | "greeting": "Hello, Bright Walker! 
You have 10 unread messages.", 74 | "favoriteFruit": "apple" 75 | }, 76 | { 77 | "_id": "63c9575fe709b2f9cc83bafb", 78 | "index": 2, 79 | "guid": "2960f2f0-3385-4c86-b923-cc4dafa4eabc", 80 | "isActive": false, 81 | "balance": "$2,612.63", 82 | "picture": "http://placehold.it/32x32", 83 | "age": 27, 84 | "eyeColor": "green", 85 | "name": "Vickie Kirk", 86 | "gender": "female", 87 | "company": "TERRASYS", 88 | "email": "vickiekirk@terrasys.com", 89 | "phone": "+1 (855) 510-3805", 90 | "address": "226 Clifton Place, Umapine, West Virginia, 9024", 91 | "about": "Cillum id sunt duis duis. Aliqua sunt reprehenderit cillum elit do ex anim labore aliqua dolore. Est commodo sint esse dolore. Ullamco nostrud consequat Lorem amet ex aliqua eu aliqua. Dolor nulla pariatur voluptate laborum ipsum labore nostrud. Deserunt velit duis cupidatat Lorem voluptate dolor cupidatat elit ad laborum. Dolor cillum consectetur ipsum ea.\r\n", 92 | "registered": "2016-03-16T08:02:52 -08:00", 93 | "latitude": 4.663802, 94 | "longitude": -109.641215, 95 | "tags": [ 96 | "eiusmod", 97 | "qui", 98 | "consectetur", 99 | "esse", 100 | "duis", 101 | "pariatur", 102 | "occaecat" 103 | ], 104 | "friends": [ 105 | { 106 | "id": 0, 107 | "name": "Roberta Holmes" 108 | }, 109 | { 110 | "id": 1, 111 | "name": "Wade Carson" 112 | }, 113 | { 114 | "id": 2, 115 | "name": "Angela Elliott" 116 | } 117 | ], 118 | "greeting": "Hello, Vickie Kirk! 
You have 7 unread messages.", 119 | "favoriteFruit": "apple" 120 | }, 121 | { 122 | "_id": "63c9575f751d888ced174947", 123 | "index": 3, 124 | "guid": "9a54406a-5984-4b8f-b099-793a9f6eb058", 125 | "isActive": false, 126 | "balance": "$2,237.38", 127 | "picture": "http://placehold.it/32x32", 128 | "age": 36, 129 | "eyeColor": "blue", 130 | "name": "Gwendolyn Santana", 131 | "gender": "female", 132 | "company": "CORIANDER", 133 | "email": "gwendolynsantana@coriander.com", 134 | "phone": "+1 (964) 500-3511", 135 | "address": "935 Colonial Road, Waikele, Maryland, 915", 136 | "about": "Anim irure ullamco id sit commodo elit consequat fugiat ea et consequat sit elit. Non est excepteur elit mollit dolor minim id nisi. Veniam in mollit est cupidatat ut nisi. Enim tempor quis esse ad qui elit in labore fugiat. Cupidatat officia duis sunt nulla nulla irure occaecat incididunt Lorem ipsum anim aute fugiat. Incididunt veniam ullamco ad enim laboris veniam fugiat.\r\n", 137 | "registered": "2016-07-31T07:24:07 -08:00", 138 | "latitude": 0.394964, 139 | "longitude": 15.55369, 140 | "tags": [ 141 | "Lorem", 142 | "laboris", 143 | "culpa", 144 | "aute", 145 | "amet", 146 | "occaecat", 147 | "est" 148 | ], 149 | "friends": [ 150 | { 151 | "id": 0, 152 | "name": "Lacey Baird" 153 | }, 154 | { 155 | "id": 1, 156 | "name": "Jodie Buck" 157 | }, 158 | { 159 | "id": 2, 160 | "name": "Marcia Dale" 161 | } 162 | ], 163 | "greeting": "Hello, Gwendolyn Santana! 
You have 9 unread messages.", 164 | "favoriteFruit": "strawberry" 165 | }, 166 | { 167 | "_id": "63c9575f35c6b1e5c05fa196", 168 | "index": 4, 169 | "guid": "1d99be6f-9b19-462d-9620-f4d4d4d1395f", 170 | "isActive": true, 171 | "balance": "$1,540.12", 172 | "picture": "http://placehold.it/32x32", 173 | "age": 29, 174 | "eyeColor": "blue", 175 | "name": "Myrtle Graves", 176 | "gender": "female", 177 | "company": "KONNECT", 178 | "email": "myrtlegraves@konnect.com", 179 | "phone": "+1 (879) 519-3135", 180 | "address": "214 Aviation Road, Cartwright, Oklahoma, 6375", 181 | "about": "Consequat excepteur cillum aute mollit eiusmod nisi sint magna tempor. Officia excepteur ad adipisicing commodo sunt sit exercitation nostrud consequat enim esse consequat ad id. Et consectetur nisi sint fugiat ea esse ex dolor aute consectetur Lorem enim esse. Enim in laborum voluptate ut anim quis ex. Tempor eiusmod excepteur sunt ullamco ut irure. Irure commodo in magna ipsum aliqua anim duis. Cupidatat id cillum fugiat est non ullamco proident excepteur fugiat.\r\n", 182 | "registered": "2016-07-19T03:54:30 -08:00", 183 | "latitude": 25.370126, 184 | "longitude": -108.945028, 185 | "tags": [ 186 | "proident", 187 | "sint", 188 | "elit", 189 | "dolor", 190 | "tempor", 191 | "voluptate", 192 | "sint" 193 | ], 194 | "friends": [ 195 | { 196 | "id": 0, 197 | "name": "Jeanine Ramirez" 198 | }, 199 | { 200 | "id": 1, 201 | "name": "Hudson Larsen" 202 | }, 203 | { 204 | "id": 2, 205 | "name": "Ray Sosa" 206 | } 207 | ], 208 | "greeting": "Hello, Myrtle Graves! 
You have 2 unread messages.", 209 | "favoriteFruit": "banana" 210 | }, 211 | { 212 | "_id": "63c9575f4e9fd02e6d786c98", 213 | "index": 5, 214 | "guid": "1e8da65d-9cc3-4c54-b12f-3f07b3d75405", 215 | "isActive": false, 216 | "balance": "$3,043.61", 217 | "picture": "http://placehold.it/32x32", 218 | "age": 38, 219 | "eyeColor": "green", 220 | "name": "Ava Coleman", 221 | "gender": "female", 222 | "company": "SPHERIX", 223 | "email": "avacoleman@spherix.com", 224 | "phone": "+1 (870) 443-2790", 225 | "address": "552 Montgomery Street, Chemung, Colorado, 4277", 226 | "about": "Deserunt deserunt culpa commodo velit. Sunt cupidatat culpa pariatur anim elit ea consequat sint officia nostrud nulla ea excepteur reprehenderit. Ad officia quis deserunt commodo pariatur commodo anim deserunt tempor ut voluptate. Pariatur officia sint mollit pariatur. Ipsum proident laboris aliquip mollit quis laborum occaecat. Nulla tempor sit magna dolore ullamco laborum officia culpa elit reprehenderit. Mollit adipisicing culpa enim non et cupidatat elit.\r\n", 227 | "registered": "2022-05-08T12:58:12 -08:00", 228 | "latitude": -65.769191, 229 | "longitude": -131.469445, 230 | "tags": [ 231 | "minim", 232 | "aliquip", 233 | "qui", 234 | "quis", 235 | "proident", 236 | "culpa", 237 | "minim" 238 | ], 239 | "friends": [ 240 | { 241 | "id": 0, 242 | "name": "Barbra Yates" 243 | }, 244 | { 245 | "id": 1, 246 | "name": "Kristina Nash" 247 | }, 248 | { 249 | "id": 2, 250 | "name": "Hannah Lawrence" 251 | } 252 | ], 253 | "greeting": "Hello, Ava Coleman! 
You have 8 unread messages.", 254 | "favoriteFruit": "apple" 255 | }, 256 | { 257 | "_id": "63c9575f6930d162c5252a99", 258 | "index": 6, 259 | "guid": "52eaeba1-9b1a-4a8d-894f-fb18a2713224", 260 | "isActive": false, 261 | "balance": "$3,793.45", 262 | "picture": "http://placehold.it/32x32", 263 | "age": 22, 264 | "eyeColor": "green", 265 | "name": "Barr Mcfarland", 266 | "gender": "male", 267 | "company": "MATRIXITY", 268 | "email": "barrmcfarland@matrixity.com", 269 | "phone": "+1 (860) 552-3334", 270 | "address": "450 Withers Street, Holcombe, Pennsylvania, 362", 271 | "about": "Nulla ipsum sit laboris est laborum esse aliqua veniam est commodo reprehenderit. Velit minim pariatur dolor elit est ex. Cillum amet ad Lorem ut occaecat non in reprehenderit deserunt dolore nisi. Magna culpa ullamco eiusmod enim consequat ad incididunt ea commodo proident. Sit excepteur proident aliqua consectetur. Ipsum tempor laborum id commodo commodo.\r\n", 272 | "registered": "2021-08-11T07:59:57 -08:00", 273 | "latitude": 20.675162, 274 | "longitude": -27.52169, 275 | "tags": [ 276 | "et", 277 | "reprehenderit", 278 | "excepteur", 279 | "labore", 280 | "amet", 281 | "aute", 282 | "reprehenderit" 283 | ], 284 | "friends": [ 285 | { 286 | "id": 0, 287 | "name": "Parker Stevenson" 288 | }, 289 | { 290 | "id": 1, 291 | "name": "Amelia Velazquez" 292 | }, 293 | { 294 | "id": 2, 295 | "name": "Terra Lamb" 296 | } 297 | ], 298 | "greeting": "Hello, Barr Mcfarland! 
You have 8 unread messages.", 299 | "favoriteFruit": "apple" 300 | }, 301 | { 302 | "_id": "63c9575ff828fc991a1d4755", 303 | "index": 7, 304 | "guid": "82638221-0764-4f8d-94ef-4791fd04efef", 305 | "isActive": true, 306 | "balance": "$2,784.86", 307 | "picture": "http://placehold.it/32x32", 308 | "age": 37, 309 | "eyeColor": "blue", 310 | "name": "Salinas Guthrie", 311 | "gender": "male", 312 | "company": "HYDROCOM", 313 | "email": "salinasguthrie@hydrocom.com", 314 | "phone": "+1 (883) 549-2405", 315 | "address": "799 Lott Street, Bergoo, Montana, 1069", 316 | "about": "Ut reprehenderit ipsum veniam consequat. Aliquip excepteur dolore occaecat nostrud adipisicing incididunt adipisicing aliqua magna excepteur proident ut dolor aliquip. Eu laboris eiusmod nisi ea culpa in velit tempor et magna eu nulla. Quis exercitation est sunt ea magna. Ipsum commodo elit ipsum qui ullamco aliquip et incididunt aute labore sit pariatur deserunt.\r\n", 317 | "registered": "2017-09-13T11:29:28 -08:00", 318 | "latitude": -85.916878, 319 | "longitude": -115.646715, 320 | "tags": ["eu", "ex", "anim", "voluptate", "est", "nisi", "nisi"], 321 | "friends": [ 322 | { 323 | "id": 0, 324 | "name": "Samantha Richards" 325 | }, 326 | { 327 | "id": 1, 328 | "name": "Solis Greer" 329 | }, 330 | { 331 | "id": 2, 332 | "name": "Hines Craft" 333 | } 334 | ], 335 | "greeting": "Hello, Salinas Guthrie! 
You have 2 unread messages.", 336 | "favoriteFruit": "apple" 337 | }, 338 | { 339 | "_id": "63c9575f0bff7d55315eef4e", 340 | "index": 8, 341 | "guid": "dc853d6a-381e-46af-bfc5-d9e85cf55f9b", 342 | "isActive": true, 343 | "balance": "$2,258.94", 344 | "picture": "http://placehold.it/32x32", 345 | "age": 29, 346 | "eyeColor": "green", 347 | "name": "Nola Hood", 348 | "gender": "female", 349 | "company": "BLANET", 350 | "email": "nolahood@blanet.com", 351 | "phone": "+1 (984) 473-3936", 352 | "address": "895 Boerum Street, Ezel, Mississippi, 7898", 353 | "about": "Sit aliqua laborum commodo non amet cillum eiusmod in. Consectetur et nostrud cillum incididunt tempor deserunt exercitation id. Non officia ex labore reprehenderit duis proident occaecat aliqua. Irure pariatur labore qui minim do laboris. Tempor nisi sint laborum ex dolor amet tempor anim magna ullamco. Et esse eu consequat sit ex occaecat occaecat.\r\n", 354 | "registered": "2021-05-21T03:57:28 -08:00", 355 | "latitude": 5.554395, 356 | "longitude": 19.776156, 357 | "tags": [ 358 | "nulla", 359 | "consequat", 360 | "ad", 361 | "aute", 362 | "officia", 363 | "tempor", 364 | "est" 365 | ], 366 | "friends": [ 367 | { 368 | "id": 0, 369 | "name": "Stefanie Mercado" 370 | }, 371 | { 372 | "id": 1, 373 | "name": "Christi Lindsey" 374 | }, 375 | { 376 | "id": 2, 377 | "name": "Corine Dickerson" 378 | } 379 | ], 380 | "greeting": "Hello, Nola Hood! 
You have 7 unread messages.", 381 | "favoriteFruit": "banana" 382 | } 383 | ] 384 | -------------------------------------------------------------------------------- /benchmarks/small.json: -------------------------------------------------------------------------------- 1 | { 2 | "_id": "63c956f065bf6fafd365f977", 3 | "index": 0, 4 | "guid": "7876d7af-1b0c-4cff-9a23-9239f9652f16", 5 | "isActive": true, 6 | "balance": "$3,738.42", 7 | "picture": "http://placehold.it/32x32", 8 | "age": 36, 9 | "eyeColor": "green", 10 | "name": "Wilkerson Wheeler", 11 | "gender": "male", 12 | "company": "TRASOLA", 13 | "email": "wilkersonwheeler@trasola.com", 14 | "phone": "+1 (982) 534-2287", 15 | "address": "612 Beekman Place, Heil, Alabama, 1778", 16 | "latitude": -12.695606, 17 | "longitude": -177.70091, 18 | "tags": [ 19 | "sint", 20 | "ut", 21 | "consectetur", 22 | "esse", 23 | "deserunt", 24 | "cupidatat", 25 | "ipsum" 26 | ], 27 | "friends": [ 28 | { 29 | "id": 0, 30 | "name": "Heather Nolan" 31 | }, 32 | { 33 | "id": 1, 34 | "name": "Rivera Foley" 35 | }, 36 | { 37 | "id": 2, 38 | "name": "Billie Gregory" 39 | } 40 | ], 41 | "greeting": "Hello, Wilkerson Wheeler! 
You have 6 unread messages.", 42 | "favoriteFruit": "banana" 43 | } 44 | -------------------------------------------------------------------------------- /benchmarks/trace.py: -------------------------------------------------------------------------------- 1 | # type: ignore 2 | import asyncio 3 | import time 4 | from asyncio import create_task, sleep 5 | from dataclasses import dataclass 6 | from typing import Callable, Iterable, List, cast 7 | 8 | import aiocache 9 | import redis 10 | from cashews import cache as wcache 11 | from redis.asyncio import Redis 12 | from redis.asyncio.connection import BlockingConnectionPool 13 | 14 | from benchmarks.zipf import Zipf 15 | from cacheme.core import get, get_all, _awaits_len 16 | from cacheme.data import list_storages, register_storage 17 | from cacheme.models import Cache, Node 18 | from cacheme.serializer import MsgPackSerializer 19 | from cacheme.storages import Storage 20 | 21 | wb = wcache.setup("redis://", max_connections=100, wait_for_connection_timeout=300) 22 | 23 | 24 | REQUESTS = 200000 25 | 26 | aiocache.caches.set_config( 27 | { 28 | "default": { 29 | "cache": "aiocache.SimpleMemoryCache", 30 | "serializer": {"class": "aiocache.serializers.StringSerializer"}, 31 | }, 32 | "redis_alt": { 33 | "cache": "aiocache.RedisCache", 34 | "endpoint": "127.0.0.1", 35 | "port": 6379, 36 | "timeout": 200, 37 | "plugins": [], 38 | }, 39 | } 40 | ) 41 | 42 | 43 | async def simple_get(Node: Callable, i: int): 44 | result = await get(Node(uid=i)) 45 | assert result == i 46 | 47 | 48 | async def simple_get_all(Node: Callable, l: List[int]): 49 | result = await get_all([Node(uid=i) for i in l]) 50 | assert list(result) == l 51 | 52 | 53 | async def simple_get_ii(Node: Callable, i: int): 54 | result = await Node(uid=i).load_ii() 55 | assert result == i 56 | 57 | 58 | async def simple_get_iii(Node: Callable, i: int): 59 | result = await Node(uid=i).load_iii() 60 | assert result == i 61 | 62 | 63 | async def simple_get_iv(Node: 
Callable, i: int): 64 | result = await Node(uid=i).load_iv() 65 | assert result == i 66 | 67 | 68 | async def simple_get_v(Node: Callable, i: int): 69 | result = await Node(uid=i).load_v() 70 | assert result == i 71 | 72 | 73 | def zipf_key_gen() -> Iterable: 74 | z = Zipf(1.001, 10, REQUESTS) 75 | for _ in range(REQUESTS): 76 | yield f"{z.get()}" 77 | 78 | 79 | def ucb_key_gen() -> Iterable: 80 | with open(f"benchmarks/trace/ucb", "rb") as f: 81 | for line in f: 82 | vb = line.split(b" ")[-2] 83 | try: 84 | v = vb.decode() 85 | except: 86 | v = "failed" 87 | yield v 88 | 89 | 90 | def ds1_key_gen() -> Iterable: 91 | with open(f"benchmarks/trace/ds1", "r") as f: 92 | for line in f: 93 | yield line.split(",")[0] 94 | 95 | 96 | def s3_key_gen() -> Iterable: 97 | with open(f"benchmarks/trace/s3", "r") as f: 98 | for line in f: 99 | yield line.split(",")[0] 100 | 101 | 102 | async def worker(queue): 103 | while True: 104 | try: 105 | task = queue.get_nowait() 106 | except: 107 | return 108 | await task 109 | queue.task_done() 110 | 111 | 112 | async def run_concurrency(queue, workers): 113 | await asyncio.gather(*[worker(queue) for _ in range(workers)]) 114 | 115 | 116 | @dataclass 117 | class FooNode(Node): 118 | uid: str 119 | load_count = 0 120 | 121 | def key(self) -> str: 122 | return f"uid:{self.uid}" 123 | 124 | async def load(self) -> int: 125 | self.__class__.load_count += 1 126 | await sleep(0.1) 127 | return self.uid 128 | 129 | @aiocache.cached( 130 | alias="redis_alt", key_builder=lambda *args, **kw: f"uid2:{args[1].uid}" 131 | ) 132 | async def load_ii(self) -> int: 133 | self.__class__.load_count += 1 134 | await sleep(0.1) 135 | return self.uid 136 | 137 | @aiocache.cached_stampede( 138 | alias="redis_alt", 139 | key_builder=lambda *args, **kw: f"uid3:{args[1].uid}", 140 | lease=30, 141 | ) 142 | async def load_iii(self) -> int: 143 | self.__class__.load_count += 1 144 | await sleep(0.1) 145 | return self.uid 146 | 147 | @wcache(key="uid4:{self.uid}", 
ttl=None) 148 | async def load_iv(self) -> int: 149 | self.__class__.load_count += 1 150 | await sleep(0.1) 151 | return self.uid 152 | 153 | @wcache(key="uid5:{self.uid}", ttl=500, lock=True) 154 | async def load_v(self) -> int: 155 | self.__class__.load_count += 1 156 | await sleep(0.1) 157 | return self.uid 158 | 159 | class Meta(Node.Meta): 160 | version = "v1" 161 | caches = [Cache(storage="redis", ttl=None)] 162 | serializer = MsgPackSerializer() 163 | 164 | 165 | async def bench_cacheme_zipf(gen: Callable[..., Iterable], workers: int): 166 | # reset node cache 167 | FooNode.Meta.caches = [Cache(storage="redis", ttl=None)] 168 | redis_counter = 0 169 | await register_storage("redis", Storage(url="redis://localhost:6379")) 170 | client = cast(Redis, list_storages()["redis"]._storage.client) 171 | FooNode.load_count = 0 172 | 173 | def callback(response): 174 | nonlocal redis_counter 175 | redis_counter += 1 176 | return response 177 | 178 | client.set_response_callback("GET", callback) 179 | 180 | queue = asyncio.Queue() 181 | for uid in gen(): 182 | queue.put_nowait(simple_get(FooNode, uid)) 183 | now = time.time() 184 | await run_concurrency(queue, workers) 185 | print( 186 | f"cacheme redis count {redis_counter}, load count {FooNode.load_count}, spent {time.time() - now}s" 187 | ) 188 | await client.close() 189 | 190 | 191 | async def bench_cacheme_zipf_with_local(gen: Callable[..., Iterable], workers: int): 192 | # reset node cache 193 | FooNode.Meta.caches = [ 194 | Cache(storage="local", ttl=None), 195 | Cache(storage="redis", ttl=None), 196 | ] 197 | redis_counter = 0 198 | await register_storage("redis", Storage(url="redis://localhost:6379")) 199 | await register_storage("local", Storage(url="local://tlfu", size=3000)) 200 | client = cast(Redis, list_storages()["redis"]._storage.client) 201 | FooNode.load_count = 0 202 | 203 | def callback(response): 204 | nonlocal redis_counter 205 | redis_counter += 1 206 | return response 207 | 208 | 
client.set_response_callback("GET", callback) 209 | 210 | queue = asyncio.Queue() 211 | for uid in gen(): 212 | queue.put_nowait(simple_get(FooNode, uid)) 213 | now = time.time() 214 | await run_concurrency(queue, workers) 215 | print( 216 | f"cacheme with local redis count {redis_counter}, load count {FooNode.load_count}, spent {time.time() - now}s" 217 | ) 218 | await client.close() 219 | 220 | 221 | async def bench_cacheme_batch_zipf(workers: int): 222 | if workers > 10000: 223 | return 224 | # reset node cache 225 | FooNode.Meta.caches = [Cache(storage="redis", ttl=None)] 226 | redis_counter = 0 227 | await register_storage("redis", Storage(url="redis://localhost:6379")) 228 | client = cast(Redis, list_storages()["redis"]._storage.client) 229 | FooNode.load_count = 0 230 | 231 | def callback(response): 232 | nonlocal redis_counter 233 | redis_counter += 1 234 | return response 235 | 236 | client.set_response_callback("MGET", callback) 237 | z = Zipf(1.0001, 10, REQUESTS) 238 | 239 | def get20(z): 240 | l = set() 241 | while True: 242 | l.add(z.get()) 243 | if len(l) == 20: 244 | break 245 | return list(l) 246 | 247 | queue = asyncio.Queue() 248 | for _ in range(REQUESTS // 20): 249 | queue.put_nowait(simple_get_all(FooNode, get20(z))) 250 | now = time.time() 251 | await run_concurrency(queue, workers) 252 | print( 253 | f"cacheme redis count {redis_counter}, load count {FooNode.load_count}, spent {time.time() - now}s" 254 | ) 255 | await client.close() 256 | 257 | 258 | async def bench_aiocache_zipf(gen: Callable[..., Iterable], workers: int): 259 | redis_counter = 0 260 | client = cast(Redis, FooNode.load_ii.cache.client) 261 | FooNode.load_count = 0 262 | 263 | def callback(response): 264 | nonlocal redis_counter 265 | redis_counter += 1 266 | return response 267 | 268 | client.set_response_callback("GET", callback) 269 | client.connection_pool = BlockingConnectionPool.from_url( 270 | "redis://localhost:6379", max_connections=100, timeout=None 271 | ) 272 | 
273 | queue = asyncio.Queue() 274 | for uid in gen(): 275 | queue.put_nowait(simple_get_ii(FooNode, uid)) 276 | now = time.time() 277 | await run_concurrency(queue, workers) 278 | print( 279 | f"aiocache redis count {redis_counter}, load count {FooNode.load_count}, spent {time.time() - now}s" 280 | ) 281 | await client.close() 282 | 283 | 284 | async def bench_aiocache_stampede_zipf(gen: Callable[..., Iterable], workers: int): 285 | redis_counter = 0 286 | client = cast(Redis, FooNode.load_iii.cache.client) 287 | FooNode.load_count = 0 288 | 289 | def callback(response): 290 | nonlocal redis_counter 291 | redis_counter += 1 292 | return response 293 | 294 | client.set_response_callback("GET", callback) 295 | client.connection_pool = BlockingConnectionPool.from_url( 296 | "redis://localhost:6379", max_connections=100, timeout=None 297 | ) 298 | 299 | queue = asyncio.Queue() 300 | for uid in gen(): 301 | queue.put_nowait(simple_get_iii(FooNode, uid)) 302 | now = time.time() 303 | await run_concurrency(queue, workers) 304 | print( 305 | f"aiocache stampede redis count {redis_counter}, load count {FooNode.load_count}, spent {time.time() - now}s" 306 | ) 307 | await client.close() 308 | 309 | 310 | async def bench_cashews_zipf(gen: Callable[..., Iterable], workers: int): 311 | redis_counter = 0 312 | await wcache.get("foo") 313 | client = cast(Redis, wb._client) 314 | FooNode.load_count = 0 315 | 316 | def callback(response): 317 | nonlocal redis_counter 318 | redis_counter += 1 319 | return response 320 | 321 | client.set_response_callback("GET", callback) 322 | 323 | queue = asyncio.Queue() 324 | for uid in gen(): 325 | queue.put_nowait(simple_get_iv(FooNode, uid)) 326 | now = time.time() 327 | await run_concurrency(queue, workers) 328 | print( 329 | f"cashews redis count {redis_counter}, load count {FooNode.load_count}, spent {time.time() - now}s" 330 | ) 331 | 332 | 333 | async def bench_cashews_lock_zipf(gen: Callable[..., Iterable], workers: int): 334 | 
redis_counter = 0 335 | await wcache.get("foo") 336 | client = cast(Redis, wb._client) 337 | FooNode.load_count = 0 338 | 339 | def callback(response): 340 | nonlocal redis_counter 341 | redis_counter += 1 342 | return response 343 | 344 | client.set_response_callback("GET", callback) 345 | 346 | queue = asyncio.Queue() 347 | for uid in gen(): 348 | queue.put_nowait(simple_get_v(FooNode, uid)) 349 | now = time.time() 350 | await run_concurrency(queue, workers) 351 | print( 352 | f"cashews locked redis count {redis_counter}, load count {FooNode.load_count}, spent {time.time() - now}s" 353 | ) 354 | 355 | 356 | async def worker_wait(queue): 357 | while True: 358 | task = await queue.get() 359 | await task 360 | queue.task_done() 361 | 362 | 363 | async def run_concurrency_wait(queue, workers): 364 | await asyncio.gather(*[worker_wait(queue) for _ in range(workers)]) 365 | 366 | 367 | async def infinit_run(cap: int): 368 | FooNode.Meta.caches = [Cache(storage="local", ttl=None)] 369 | await register_storage("local", Storage(url="local://tlfu", size=cap)) 370 | z = Zipf(1.001, 10, 100000000) 371 | counter = 0 372 | queue = asyncio.Queue(maxsize=2000) 373 | task = create_task(run_concurrency_wait(queue, 2000)) 374 | while True: 375 | uid = z.get() 376 | await queue.put(simple_get(FooNode, uid)) 377 | counter += 1 378 | if counter % 100000 == 0: 379 | await sleep(0.5) 380 | print(f"finish {counter // 100000}, tmp len: {_awaits_len()}") 381 | 382 | 383 | async def run(): 384 | 385 | for w in [1000, 10000, 100000]: 386 | r = redis.Redis(host="localhost", port=6379) 387 | r.flushall() 388 | 389 | print(f"==== zipf benchmark: concurrency {w} ====") 390 | await bench_cacheme_zipf(zipf_key_gen, w) 391 | r.flushall() # flush because local use same key 392 | await bench_cacheme_zipf_with_local(zipf_key_gen, w) 393 | await bench_aiocache_zipf(zipf_key_gen, w) 394 | await bench_aiocache_stampede_zipf(zipf_key_gen, w) 395 | await bench_cashews_zipf(zipf_key_gen, w) 396 | await 
bench_cashews_lock_zipf(zipf_key_gen, w) 397 | 398 | for w in [1000, 10000, 100000]: 399 | r = redis.Redis(host="localhost", port=6379) 400 | r.flushall() 401 | 402 | print(f"==== zipf batch benchmark: concurrency {w} ====") 403 | await bench_cacheme_batch_zipf(w) 404 | 405 | 406 | asyncio.run(run()) 407 | # asyncio.run(infinit_run(50000)) 408 | -------------------------------------------------------------------------------- /benchmarks/zipf.py: -------------------------------------------------------------------------------- 1 | import math 2 | import random 3 | 4 | 5 | class Zipf: 6 | def __init__(self, s: float, v: float, imax: int): 7 | if s <= 1 or v < 1: 8 | raise 9 | self.imax = float(imax) 10 | self.v = v 11 | self.q = s 12 | self.oneminus_q = 1.0 - self.q 13 | self.oneminus_qinv = 1.0 / self.oneminus_q 14 | self.hxm = self.h(self.imax + 0.5) 15 | self.hx0minus_hxm = ( 16 | self.h(0.5) - math.exp(math.log(self.v) * (-self.q)) - self.hxm 17 | ) 18 | self.s = 1 - self.hinv(self.h(1.5) - math.exp(-self.q * math.log(self.v + 1.0))) 19 | 20 | def h(self, x: float) -> float: 21 | return math.exp(self.oneminus_q * math.log(self.v + x)) * self.oneminus_qinv 22 | 23 | def hinv(self, x: float) -> float: 24 | return math.exp(self.oneminus_qinv * math.log(self.oneminus_q * x)) - self.v 25 | 26 | def get(self) -> int: 27 | k = 0 28 | while True: 29 | r = random.random() 30 | ur = self.hxm + r * self.hx0minus_hxm 31 | x = self.hinv(ur) 32 | k = math.floor(x + 0.5) 33 | if k - x <= self.s: 34 | break 35 | if ur >= self.h(k + 0.5) - math.exp(-math.log(k + self.v) * self.q): 36 | break 37 | return int(k) 38 | -------------------------------------------------------------------------------- /cacheme/__init__.py: -------------------------------------------------------------------------------- 1 | from theine import BloomFilter 2 | 3 | from cacheme.core import (Memoize, build_node, get, get_all, invalidate, nodes, 4 | refresh, stats) 5 | from cacheme.data import register_storage 
6 | from cacheme.models import Cache, DynamicNode, Node, set_prefix 7 | from cacheme.storages import Storage 8 | -------------------------------------------------------------------------------- /cacheme/core.py: -------------------------------------------------------------------------------- 1 | from asyncio import Event, Future 2 | from collections import OrderedDict 3 | from functools import update_wrapper 4 | from time import time_ns 5 | from typing import ( 6 | Any, 7 | Awaitable, 8 | Callable, 9 | Dict, 10 | Iterable, 11 | List, 12 | Optional, 13 | Sequence, 14 | Tuple, 15 | Type, 16 | TypeVar, 17 | cast, 18 | overload, 19 | ) 20 | 21 | from typing_extensions import ParamSpec, Protocol 22 | 23 | from cacheme.interfaces import DoorKeeper, Metrics, Serializer, Node 24 | from cacheme.models import ( 25 | Cache, 26 | DynamicNode, 27 | Fetcher, 28 | _add_node, 29 | get_nodes, 30 | sentinel, 31 | ) 32 | 33 | 34 | P = ParamSpec("P") 35 | R = TypeVar("R", covariant=True) 36 | N = TypeVar("N", bound=Node) 37 | 38 | 39 | class Locker: 40 | lock: Event 41 | value: Any 42 | 43 | def __init__(self): 44 | self.lock = Event() 45 | self.value = None 46 | 47 | 48 | _awaits: Dict[str, Future] = {} 49 | 50 | 51 | def _awaits_len(): 52 | return len(_awaits) 53 | 54 | 55 | @overload 56 | async def get(node: Node[R]) -> R: 57 | ... 58 | 59 | 60 | @overload 61 | async def get(node: N, load_fn: Callable[[N], Awaitable[R]]) -> R: 62 | ... 63 | 64 | 65 | async def get(node: Node, load_fn=None): 66 | """ 67 | Get data from node. Will call load function if cache miss. 68 | 69 | :param node: node instance to get data. 70 | :param load_fn: override load function, which will be called instead of node load function if set. 
71 | """ 72 | metrics = node.Meta.metrics 73 | result = sentinel 74 | caches = node.Meta.caches 75 | local_caches: List[Cache] = [] 76 | remote_caches: List[Cache] = [] 77 | miss: List[Cache] = [] 78 | 79 | for cache in caches: 80 | if cache.is_local: 81 | local_caches.append(cache) 82 | else: 83 | remote_caches.append(cache) 84 | 85 | # try get cached data from local storages first 86 | for cache in local_caches: 87 | result = cache.storage.get_sync(node, None) 88 | if result is not sentinel: 89 | metrics._hit_count += 1 90 | # return fast if hit on first local cache 91 | if not miss: 92 | return result 93 | break 94 | miss.append(cache) 95 | 96 | # can't find cached result in any local storage, try load from remote storage 97 | # remote storages are slow and asynchronous, use tmp cached awaitables to avoid thundering herd 98 | if result is sentinel: 99 | key = node.full_key() 100 | future = _awaits.get(key, None) 101 | if future is None: 102 | metrics._miss_count += 1 103 | future = Future() 104 | _awaits[node.full_key()] = future 105 | now = time_ns() 106 | try: 107 | result = await _load_from_caches(node, remote_caches, miss, load_fn) 108 | except Exception as e: 109 | metrics._load_failure_count += 1 110 | metrics._total_load_time += time_ns() - now 111 | _awaits.pop(node.full_key(), None) 112 | raise (e) 113 | metrics._load_success_count += 1 114 | metrics._total_load_time += time_ns() - now 115 | future.set_result(result) 116 | else: 117 | metrics._hit_count += 1 118 | result = await future 119 | 120 | # fill missing caches 121 | for cache in miss: 122 | await cache.storage.set(node, result, cache.ttl, node.Meta.serializer) 123 | # remove from tmp cache after fill 124 | _awaits.pop(node.full_key(), None) 125 | 126 | return result 127 | 128 | 129 | # try load data from remote storages, load from source if not found 130 | async def _load_from_caches( 131 | node: Node, caches: List[Cache], miss: List[Cache], load_fn=None 132 | ): 133 | serializer = 
node.get_seriaizer() 134 | result = sentinel 135 | for cache in caches: 136 | result = await cache.storage.get(node, serializer) 137 | if result is not sentinel: 138 | break 139 | miss.append(cache) 140 | # load from source 141 | if result is sentinel: 142 | result = await node.load() if load_fn is None else await load_fn(node) 143 | 144 | return result 145 | 146 | 147 | async def get_all(nodes: Sequence[Node[R]]) -> List[R]: 148 | """ 149 | Get data from multiple nodes. Will call load function if cahce miss. 150 | 151 | :param nodes: sequence of nodes, must be same type. 152 | """ 153 | if len(nodes) == 0: 154 | return [] 155 | node_cls = nodes[0].__class__ 156 | metrics = nodes[0].get_metrics() 157 | pending: Dict[str, Node] = {} 158 | missing: Dict[Cache, Iterable[Node]] = {} 159 | caches = nodes[0].get_caches() 160 | results: OrderedDict[str, Any] = OrderedDict() 161 | # initialize reuslts dict and pending list 162 | for node in nodes: 163 | if node.__class__ != node_cls: 164 | raise Exception( 165 | f"node class mismatch: expect [{node_cls}], get [{node.__class__}]" 166 | ) 167 | pending[node.full_key()] = node 168 | results[node.full_key()] = sentinel 169 | 170 | # split local/remote cache 171 | local_caches: List[Cache] = [] 172 | remote_caches: List[Cache] = [] 173 | for cache in caches: 174 | if cache.is_local: 175 | local_caches.append(cache) 176 | else: 177 | remote_caches.append(cache) 178 | 179 | # load from local caches first 180 | for cache in local_caches: 181 | result = cache.storage.get_all_sync(tuple(pending.values()), None) 182 | for k, v in result: 183 | pending.pop(k.full_key(), None) 184 | results[k.full_key()] = v 185 | missing[cache] = tuple(pending.values()) 186 | 187 | # load from remote cache 188 | fetch: Dict[str, Node] = {} # missing nodes, need to load from source 189 | if len(pending) > 0: 190 | wait: List[ 191 | Tuple[str, Future] 192 | ] = [] # nodes already loading by others, only need to wait here 193 | for node in 
pending.values(): 194 | future = _awaits.get(node.full_key(), None) 195 | if future is None: 196 | fetch[node.full_key()] = node 197 | else: 198 | wait.append((node.full_key(), future)) 199 | 200 | # update metrics 201 | metrics._miss_count += len(fetch) 202 | metrics._hit_count += len(nodes) - len(fetch) 203 | 204 | if len(fetch) > 0: 205 | fetcher = Fetcher() 206 | aws: List[Tuple[str, Future]] = [] 207 | for key, node in fetch.items(): 208 | future = Future() 209 | _awaits[key] = future 210 | aws.append((key, future)) 211 | fetcher.data = await _get_multi( 212 | nodes[0], remote_caches, fetch, missing, metrics 213 | ) 214 | # load done, set all events and results 215 | for aw in aws: 216 | aw[1].set_result(fetcher.data[aw[0]]) 217 | for ks, vs in fetcher.data.items(): 218 | results[ks] = vs 219 | for w in wait: 220 | results[w[0]] = await w[1] 221 | 222 | # fill missing caches 223 | for cache, missing_nodes in missing.items(): 224 | data = [(node, results[node.full_key()]) for node in missing_nodes] 225 | if len(data) > 0: 226 | await cache.storage.set_all(data, cache.ttl, node_cls.Meta.serializer) 227 | 228 | # remove tmp_cache 229 | for key in fetch: 230 | _awaits.pop(key) 231 | 232 | # finally 233 | return list(results.values()) 234 | 235 | 236 | async def _get_multi( 237 | node: Node, 238 | caches: List[Cache], 239 | nodes: Dict[str, Node], 240 | missing: Dict[Cache, Iterable], 241 | metrics: Metrics, 242 | ) -> Dict[str, Any]: 243 | serializer = node.get_seriaizer() 244 | results: Dict[str, Any] = {} 245 | for cache in caches: 246 | cached = await cache.storage.get_all(list(nodes.values()), serializer) 247 | for k, v in cached: 248 | nodes.pop(k.full_key(), None) 249 | results[k.full_key()] = v 250 | missing[cache] = tuple(nodes.values()) 251 | 252 | # load from source 253 | if len(nodes) > 0: 254 | now = time_ns() 255 | try: 256 | loaded = await node.load_all(tuple(nodes.values())) 257 | for k, v in loaded: 258 | results[k.full_key()] = v 259 | except 
Exception as e: 260 | metrics._load_failure_count += len(nodes) 261 | metrics._total_load_time += time_ns() - now 262 | raise (e) 263 | metrics._load_success_count += len(nodes) 264 | metrics._total_load_time += time_ns() - now 265 | return results 266 | 267 | 268 | class Cached(Protocol[P, R]): 269 | def to_node(self, fn: Callable[P, Node]): 270 | ... 271 | 272 | def __call__(self, *args, **kwargs) -> R: 273 | ... 274 | 275 | 276 | def Wrapper( 277 | fn: Callable[P, R], 278 | ) -> Cached[P, R]: 279 | _func = fn 280 | _node_func = None 281 | 282 | def to_node(fn: Callable[P, Node]): 283 | nonlocal _node_func 284 | _node_func = fn 285 | 286 | async def fetch(*args: P.args, **kwargs: P.kwargs) -> R: 287 | node = _node_func(*args, **kwargs) # type: ignore 288 | node = cast(Node, node) 289 | 290 | # inline load function 291 | async def load() -> Any: 292 | return await _func(*args, **kwargs) # type: ignore 293 | 294 | node.load = load # type: ignore 295 | return await get(node) 296 | 297 | fetch.to_node = to_node # type: ignore 298 | return fetch # type: ignore 299 | 300 | 301 | class Memoize: 302 | def __init__(self, node: Type[Node]): 303 | self.node = node 304 | 305 | def __call__(self, fn: Callable[P, R]) -> Cached[P, R]: 306 | wrapper = Wrapper(fn) 307 | return update_wrapper(wrapper, fn) 308 | 309 | 310 | def nodes() -> List[Type[Node]]: 311 | return get_nodes() 312 | 313 | 314 | def stats(node: Type[Node]) -> Metrics: 315 | return node.get_metrics() 316 | 317 | 318 | async def invalidate(node: Node): 319 | caches = node.get_caches() 320 | for cache in caches: 321 | await cache.storage.remove(node) 322 | 323 | 324 | async def refresh(node: Node[R]) -> R: 325 | await invalidate(node) 326 | return await get(node) 327 | 328 | 329 | _dynamic_nodes: Dict[str, Type[DynamicNode]] = {} 330 | 331 | 332 | def build_node( 333 | name: str, 334 | version: str, 335 | caches: List[Cache], 336 | serializer: Optional[Serializer] = None, 337 | doorkeeper: Optional[DoorKeeper] = 
None, 338 | ) -> Type[DynamicNode]: 339 | if name in _dynamic_nodes: 340 | return _dynamic_nodes[name] 341 | new: Type[DynamicNode] = type(name, (DynamicNode,), {}) 342 | new.Meta.version = version 343 | new.Meta.caches = caches 344 | new.Meta.serializer = serializer 345 | new.Meta.doorkeeper = doorkeeper 346 | new.Meta.metrics = Metrics() 347 | _dynamic_nodes[name] = new 348 | _add_node(new) 349 | return new 350 | -------------------------------------------------------------------------------- /cacheme/data.py: -------------------------------------------------------------------------------- 1 | from typing import Dict 2 | 3 | from cacheme.interfaces import Storage 4 | 5 | _storages: Dict[str, Storage] = {} 6 | 7 | 8 | async def register_storage(name: str, storage: Storage): 9 | _storages[name] = storage 10 | await storage.connect() 11 | 12 | 13 | def get_storage_by_name(name: str) -> Storage: 14 | global _storages 15 | return _storages[name] 16 | 17 | 18 | def list_storages() -> Dict[str, Storage]: 19 | return _storages 20 | -------------------------------------------------------------------------------- /cacheme/interfaces.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from typing import TYPE_CHECKING, NamedTuple, Optional, Sequence, Tuple, TypeVar, List 3 | 4 | from typing_extensions import Any, Protocol, ClassVar 5 | 6 | if TYPE_CHECKING: 7 | from cacheme.models import Cache 8 | 9 | R = TypeVar("R", covariant=True) 10 | 11 | # - When a cache lookup encounters an existing cache entry hit_count is incremented 12 | # - After successfully loading an entry miss_count and load_success_count are 13 | # incremented, and the total loading time, in nanoseconds, is added to total_load_time 14 | # - When an exception is thrown while loading an entry, 15 | # miss_count and load_failure_count are incremented, and the total loading 16 | # time, in nanoseconds, is added to total_load_time 17 
# Runtime cache statistics; counter semantics are described in the comment
# block above.
class Metrics:
    _hit_count: int = 0
    _miss_count: int = 0
    _load_success_count: int = 0
    _load_failure_count: int = 0
    _total_load_time: int = 0  # nanoseconds

    def request_count(self) -> int:
        return self._hit_count + self._miss_count

    def hit_count(self) -> int:
        return self._hit_count

    def hit_rate(self) -> float:
        # BUG FIX: return 0.0 instead of raising ZeroDivisionError before any
        # request has been recorded.
        total = self.request_count()
        return self._hit_count / total if total else 0.0

    def miss_count(self) -> int:
        return self._miss_count

    def miss_rate(self) -> float:
        total = self.request_count()
        return self._miss_count / total if total else 0.0

    def load_success_count(self) -> int:
        return self._load_success_count

    def load_failure_count(self) -> int:
        return self._load_failure_count

    def load_failure_rate(self) -> float:
        loads = self.load_count()
        return self._load_failure_count / loads if loads else 0.0

    def load_count(self) -> int:
        return self._load_failure_count + self._load_success_count

    def total_load_time(self) -> int:
        return self._total_load_time

    def average_load_time(self) -> float:
        loads = self.load_count()
        return self._total_load_time / loads if loads else 0.0


class CachedData(NamedTuple):
    # Deserialized value plus optional absolute expiry time.
    data: Any
    expire: Optional[datetime] = None


class Storage(Protocol):
    async def connect(self):
        ...

    async def get(self, node: "Node", serializer: Optional["Serializer"]) -> Any:
        ...

    # local storage only
    def get_sync(
        self, node: "Node", serializer: Optional["Serializer"]
    ) -> Optional[CachedData]:
        ...

    async def get_all(
        self, nodes: Sequence["Node"], serializer: Optional["Serializer"]
    ) -> Sequence[Tuple["Node", CachedData]]:
        ...

    # local storage only
    def get_all_sync(
        self, nodes: Sequence["Node"], serializer: Optional["Serializer"]
    ) -> Sequence[Tuple["Node", CachedData]]:
        ...
86 | 87 | async def set( 88 | self, 89 | node: "Node", 90 | value: Any, 91 | ttl: Optional[timedelta], 92 | serializer: Optional["Serializer"], 93 | ): 94 | ... 95 | 96 | async def remove(self, node: "Node"): 97 | ... 98 | 99 | async def set_all( 100 | self, 101 | data: Sequence[Tuple["Node", Any]], 102 | ttl: Optional[timedelta], 103 | serializer: Optional["Serializer"], 104 | ): 105 | ... 106 | 107 | def scheme(self) -> str: 108 | ... 109 | 110 | def is_local(self) -> bool: 111 | ... 112 | 113 | 114 | class Serializer(Protocol): 115 | def dumps(self, obj: Any) -> bytes: 116 | ... 117 | 118 | def loads(self, blob: bytes) -> Any: 119 | ... 120 | 121 | 122 | class DoorKeeper(Protocol): 123 | def put(self, key: str): 124 | ... 125 | 126 | def contains(self, key: str): 127 | ... 128 | 129 | 130 | class Policy(Protocol): 131 | def __init__(self, size: int): 132 | ... 133 | 134 | def set(self, key: str) -> Optional[str]: 135 | ... 136 | 137 | def remove(self, key: str): 138 | ... 139 | 140 | def access(self, key: str): 141 | ... 142 | 143 | 144 | class Node(Protocol[R]): 145 | _full_key: Optional[str] 146 | 147 | def key(self) -> str: 148 | ... 149 | 150 | def full_key(self) -> str: 151 | ... 152 | 153 | async def load(self) -> R: 154 | ... 155 | 156 | @classmethod 157 | async def load_all(cls, nodes: Sequence["Node"]) -> Sequence[Tuple["Node", Any]]: 158 | ... 159 | 160 | def get_version(self) -> str: 161 | ... 162 | 163 | def get_caches(self) -> List["Cache"]: 164 | ... 165 | 166 | def get_seriaizer(self) -> Optional[Serializer]: 167 | ... 168 | 169 | def get_doorkeeper(self) -> Optional[DoorKeeper]: 170 | ... 171 | 172 | @classmethod 173 | def get_metrics(cls) -> Metrics: 174 | ... 
175 | 176 | class Meta(Protocol): 177 | version: ClassVar[str] = "" 178 | caches: List["Cache"] = [] 179 | serializer: ClassVar[Optional[Serializer]] = None 180 | doorkeeper: ClassVar[Optional[DoorKeeper]] = None 181 | metrics: ClassVar[Metrics] 182 | -------------------------------------------------------------------------------- /cacheme/models.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | from datetime import timedelta 5 | from time import time_ns 6 | from typing import ( 7 | ClassVar, 8 | Dict, 9 | Generic, 10 | List, 11 | Optional, 12 | Sequence, 13 | Tuple, 14 | Type, 15 | TypeVar, 16 | cast, 17 | ) 18 | 19 | from typing_extensions import Any 20 | 21 | from cacheme.data import get_storage_by_name 22 | from cacheme.interfaces import DoorKeeper, Metrics, Serializer, Storage 23 | from cacheme.interfaces import Node as NodeP 24 | 25 | _nodes: List[Type[Node]] = [] 26 | _prefix: str = "cacheme" 27 | 28 | sentinel = object() 29 | C = TypeVar("C") 30 | 31 | 32 | def get_nodes(): 33 | return _nodes 34 | 35 | 36 | def _add_node(node: Type[Node]): 37 | _nodes.append(node) 38 | 39 | 40 | def set_prefix(prefix: str): 41 | global _prefix 42 | _prefix = prefix 43 | 44 | 45 | class Cache: 46 | __slots__ = ["_storage", "_storage_name", "ttl", "_is_local"] 47 | 48 | def __init__(self, storage: str, ttl: Optional[timedelta]): 49 | self._storage: Optional[Storage] = None 50 | self._storage_name: str = storage 51 | self.ttl: Optional[timedelta] = ttl 52 | self._is_local: Optional[bool] = None 53 | 54 | @property 55 | def is_local(self): 56 | if self._is_local is None: 57 | self._is_local = self.storage.is_local() 58 | return self._is_local 59 | 60 | @property 61 | def storage(self) -> Storage: 62 | if self._storage is None: 63 | self._storage = get_storage_by_name(self._storage_name) 64 | return cast(Storage, self._storage) 65 | 66 | 67 | class MetaNode(type): 68 | def 
__new__(cls, name, bases, dct): 69 | new = super().__new__(cls, name, bases, dct) 70 | if len(new.Meta.caches) > 0: 71 | _nodes.append(cast(Type[Node], cls)) 72 | new.Meta.metrics = Metrics() 73 | return new 74 | 75 | class Meta: 76 | metrics: ClassVar[Metrics] 77 | storage: ClassVar[str] = "" 78 | caches: List = [] 79 | 80 | 81 | class Node(Generic[C], metaclass=MetaNode): 82 | _full_key = None 83 | 84 | def key(self) -> str: 85 | raise NotImplementedError() 86 | 87 | def full_key(self) -> str: 88 | if self._full_key is None: 89 | self._full_key = f"{_prefix}:{self.key()}:{self.Meta.version}" 90 | return self._full_key 91 | 92 | async def load(self) -> C: 93 | raise NotImplementedError() 94 | 95 | @classmethod 96 | async def load_all(cls, nodes: Sequence[NodeP]) -> Sequence[Tuple[NodeP, Any]]: 97 | data = [] 98 | for node in nodes: 99 | v = await node.load() 100 | data.append((node, v)) 101 | return data 102 | 103 | def get_version(self) -> str: 104 | return self.Meta.version 105 | 106 | def get_caches(self) -> List[Cache]: 107 | return self.Meta.caches 108 | 109 | def get_seriaizer(self) -> Optional[Serializer]: 110 | return self.Meta.serializer 111 | 112 | def get_doorkeeper(self) -> Optional[DoorKeeper]: 113 | return self.Meta.doorkeeper 114 | 115 | @classmethod 116 | def get_metrics(cls) -> Metrics: 117 | return cls.Meta.metrics 118 | 119 | class Meta: 120 | version: ClassVar[str] = "" 121 | caches: List[Cache] = [] 122 | serializer: ClassVar[Optional[Serializer]] = None 123 | doorkeeper: ClassVar[Optional[DoorKeeper]] = None 124 | metrics: ClassVar[Metrics] 125 | 126 | 127 | class DynamicNode(Node): 128 | key_str: str 129 | 130 | def __init__(self, key: str): 131 | super().__init__() 132 | self.key_str = key 133 | 134 | def key(self) -> str: 135 | return self.key_str 136 | 137 | 138 | class Fetcher: 139 | def __init__(self): 140 | self.data: Dict[str, Any] = {} 141 | -------------------------------------------------------------------------------- 
/cacheme/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yiling-J/cacheme/f402c45267ad107a647395a74e67e53760c13755/cacheme/py.typed -------------------------------------------------------------------------------- /cacheme/serializer.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import importlib 3 | import json 4 | import pickle 5 | import zlib 6 | from types import ModuleType 7 | from typing import Any, Dict, cast 8 | 9 | import msgpack 10 | import pydantic 11 | from pydantic.json import pydantic_encoder 12 | from typing_extensions import Protocol 13 | 14 | 15 | class Serializer(Protocol): 16 | def dumps(self, obj: Any) -> bytes: 17 | raise NotImplementedError() 18 | 19 | def loads(self, blob: bytes) -> Any: 20 | raise NotImplementedError() 21 | 22 | 23 | def to_qualified_name(obj: Any) -> str: 24 | return obj.__module__ + "." + obj.__qualname__ 25 | 26 | 27 | __import_cache: Dict[str, Any] = {} 28 | 29 | 30 | def from_qualified_name(name: str) -> Any: 31 | module = __import_cache.get(name, None) 32 | if module is not None: 33 | return module 34 | try: 35 | module = importlib.import_module(name) 36 | __import_cache[name] = module 37 | return module 38 | except ImportError: 39 | # If no subitem was included raise the import error 40 | if "." 
not in name: 41 | raise 42 | 43 | # Otherwise, we'll try to load it as an attribute of a module 44 | mod_name, attr_name = name.rsplit(".", 1) 45 | module = importlib.import_module(mod_name) 46 | imported = getattr(module, attr_name) 47 | __import_cache[name] = imported 48 | return imported 49 | 50 | 51 | def object_encoder(obj: Any) -> Any: 52 | return { 53 | "__class__": to_qualified_name(obj.__class__), 54 | "data": pydantic_encoder(obj), 55 | } 56 | 57 | 58 | def object_decoder(result: dict): 59 | if "__class__" in result: 60 | return pydantic.parse_obj_as( 61 | from_qualified_name(result["__class__"]), result["data"] 62 | ) 63 | return result 64 | 65 | 66 | class PickleSerializer: 67 | def dumps(self, obj: Any) -> bytes: 68 | blob = pickle.dumps(obj) 69 | return base64.encodebytes(blob) 70 | 71 | def loads(self, blob: bytes) -> Any: 72 | return pickle.loads(base64.decodebytes(blob)) 73 | 74 | 75 | class JSONSerializer: 76 | def dumps(self, obj: Any) -> bytes: 77 | return json.dumps(obj, default=object_encoder).encode() 78 | 79 | def loads(self, blob: bytes) -> Any: 80 | return json.loads(blob.decode(), object_hook=object_decoder) 81 | 82 | 83 | class MsgPackSerializer: 84 | def dumps(self, obj: Any) -> bytes: 85 | return cast(bytes, msgpack.dumps(obj, default=object_encoder)) 86 | 87 | def loads(self, blob: bytes) -> Any: 88 | return msgpack.loads(blob, object_hook=object_decoder, strict_map_key=False) 89 | 90 | 91 | class CompressedSerializer: 92 | serializer: Serializer 93 | 94 | def dumps(self, obj: Any) -> bytes: 95 | blob = self.serializer.dumps(obj) 96 | return zlib.compress(blob, level=3) 97 | 98 | def loads(self, blob: bytes) -> Any: 99 | uncompressed = zlib.decompress(blob) 100 | return self.serializer.loads(uncompressed) 101 | 102 | 103 | class CompressedPickleSerializer(CompressedSerializer): 104 | serializer: Serializer = PickleSerializer() 105 | 106 | 107 | class CompressedJSONSerializer(CompressedSerializer): 108 | 109 | serializer: Serializer = 
JSONSerializer() 110 | 111 | 112 | class CompressedMsgPackSerializer(CompressedSerializer): 113 | serializer: Serializer = MsgPackSerializer() 114 | -------------------------------------------------------------------------------- /cacheme/storages/__init__.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | from datetime import timedelta 3 | from typing import Any, Optional, Sequence, Tuple 4 | from urllib.parse import urlparse 5 | 6 | from cacheme.interfaces import Node 7 | from cacheme.serializer import Serializer 8 | from cacheme.storages.base import BaseStorage 9 | 10 | 11 | class Storage: 12 | SUPPORTED_STORAGES = { 13 | "local": "cacheme.storages.local:LocalStorage", 14 | "redis": "cacheme.storages.redis:RedisStorage", 15 | "sqlite": "cacheme.storages.sqlite:SQLiteStorage", 16 | "mongodb": "cacheme.storages.mongo:MongoStorage", 17 | "postgresql": "cacheme.storages.postgres:PostgresStorage", 18 | "mysql": "cacheme.storages.mysql:MySQLStorage", 19 | "sqlite": "cacheme.storages.sqlite:SQLiteStorage", 20 | } 21 | 22 | def __init__(self, url: str, **options: Any): 23 | u = urlparse(url) 24 | self._scheme = u.scheme 25 | self._is_local = True if self._scheme == "local" else False 26 | 27 | name = self.SUPPORTED_STORAGES.get(u.scheme) 28 | if name is None: 29 | raise Exception(f"storage:{u.scheme} not found") 30 | storage_cls = self.__import(name) 31 | assert issubclass(storage_cls, BaseStorage) 32 | self._storage = storage_cls(address=url, **options) 33 | 34 | def scheme(self) -> str: 35 | return self._scheme 36 | 37 | def is_local(self) -> bool: 38 | return self._is_local 39 | 40 | def __import(self, name: str) -> Any: 41 | mod_name, attr_name = name.rsplit(":", 1) 42 | module = importlib.import_module(mod_name) 43 | return getattr(module, attr_name) 44 | 45 | async def connect(self): 46 | await self._storage.connect() 47 | 48 | async def get(self, node: Node, serializer: Optional[Serializer]) -> Any: 49 | 
return await self._storage.get(node, serializer) 50 | 51 | async def get_all( 52 | self, nodes: Sequence[Node], serializer: Optional[Serializer] 53 | ) -> Sequence[Tuple[Node, Any]]: 54 | return await self._storage.get_all(nodes, serializer) 55 | 56 | async def set( 57 | self, 58 | node: Node, 59 | value: Any, 60 | ttl: Optional[timedelta], 61 | serializer: Optional[Serializer], 62 | ): 63 | return await self._storage.set(node, value, ttl, serializer) 64 | 65 | async def remove(self, node: Node): 66 | return await self._storage.remove(node) 67 | 68 | async def set_all( 69 | self, 70 | data: Sequence[Tuple[Node, Any]], 71 | ttl: Optional[timedelta], 72 | serializer: Optional[Serializer], 73 | ): 74 | return await self._storage.set_all(data, ttl, serializer) 75 | 76 | async def close(self): 77 | return await self._storage.close() 78 | 79 | # local storage only 80 | def get_sync(self, node: Node, serializer: Optional[Serializer]) -> Any: 81 | return self._storage.get_sync(node, serializer) 82 | 83 | # local storage only 84 | def get_all_sync( 85 | self, nodes: Sequence[Node], serializer: Optional[Serializer] 86 | ) -> Sequence[Tuple[Node, Any]]: 87 | return self._storage.get_all_sync(nodes, serializer) 88 | -------------------------------------------------------------------------------- /cacheme/storages/base.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta, timezone 2 | from typing import Dict, List, Optional, Sequence, Tuple, cast 3 | 4 | from typing_extensions import Any 5 | 6 | from cacheme.interfaces import CachedData, Node 7 | from cacheme.models import sentinel 8 | from cacheme.serializer import Serializer 9 | 10 | 11 | class BaseStorage: 12 | def __init__(self, address: str, *args, **kwargs): 13 | self.address = address 14 | 15 | async def connect(self): 16 | raise NotImplementedError() 17 | 18 | async def get_by_key(self, key: str) -> Any: 19 | raise NotImplementedError() 20 | 21 
| async def get_by_keys(self, keys: List[str]) -> Dict[str, Any]: 22 | raise NotImplementedError() 23 | 24 | async def remove_by_key(self, key: str): 25 | raise NotImplementedError() 26 | 27 | async def set_by_key(self, key: str, value: Any, ttl: Optional[timedelta]): 28 | raise NotImplementedError() 29 | 30 | async def set_by_keys(self, data: Dict[str, Any], ttl: Optional[timedelta]): 31 | raise NotImplementedError() 32 | 33 | def get_sync(self, node: Node, serializer: Optional[Serializer]) -> Any: 34 | raise NotImplementedError() 35 | 36 | def get_all_sync( 37 | self, 38 | nodes: Sequence[Node], 39 | serializer: Optional[Serializer], 40 | ) -> Sequence[Tuple[Node, Any]]: 41 | raise NotImplementedError() 42 | 43 | def serialize(self, raw: Any, serializer: Optional[Serializer]) -> CachedData: 44 | data = raw["value"] 45 | if serializer is not None: 46 | data = serializer.loads(cast(bytes, raw["value"])) 47 | return CachedData( 48 | data=data, 49 | expire=raw["expire"], 50 | ) 51 | 52 | async def get(self, node: Node, serializer: Optional[Serializer]) -> Any: 53 | result = await self.get_by_key(node.full_key()) 54 | if result is None: 55 | return sentinel 56 | data = self.serialize(result, serializer) 57 | if data.expire is not None and data.expire.replace( 58 | tzinfo=timezone.utc 59 | ) <= datetime.now(timezone.utc): 60 | return sentinel 61 | return data.data 62 | 63 | def deserialize(self, raw: Any, serializer: Optional[Serializer]) -> Any: 64 | if serializer is not None: 65 | return serializer.dumps(raw) 66 | 67 | return raw 68 | 69 | async def set( 70 | self, 71 | node: Node, 72 | value: Any, 73 | ttl: Optional[timedelta], 74 | serializer: Optional[Serializer], 75 | ): 76 | v = self.deserialize(value, serializer) 77 | await self.set_by_key(node.full_key(), v, ttl) 78 | 79 | async def remove(self, node: Node): 80 | await self.remove_by_key(node.full_key()) 81 | 82 | async def get_all( 83 | self, 84 | nodes: Sequence[Node], 85 | serializer: Optional[Serializer], 
86 | ) -> Sequence[Tuple[Node, Any]]: 87 | if len(nodes) == 0: 88 | return [] 89 | results = [] 90 | mapping = {} 91 | keys = [] 92 | for node in nodes: 93 | key = node.full_key() 94 | keys.append(key) 95 | mapping[key] = node 96 | gets = await self.get_by_keys(keys) 97 | for k, v in gets.items(): 98 | node = mapping[k] 99 | if v is None: 100 | continue 101 | data = self.serialize(v, serializer) 102 | if data.expire is not None and data.expire.replace( 103 | tzinfo=timezone.utc 104 | ) <= datetime.now(timezone.utc): 105 | continue 106 | results.append((node, data.data)) 107 | return results 108 | 109 | async def set_all( 110 | self, 111 | data: Sequence[Tuple[Node, Any]], 112 | ttl: Optional[timedelta], 113 | serializer: Optional[Serializer], 114 | ): 115 | update = {} 116 | for node, value in data: 117 | update[node.full_key()] = self.deserialize(value, serializer) 118 | 119 | await self.set_by_keys(update, ttl) 120 | 121 | async def close(self): 122 | return 123 | -------------------------------------------------------------------------------- /cacheme/storages/local.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import Any, Optional, Sequence, Tuple 3 | from urllib.parse import urlparse 4 | 5 | from theine import Cache 6 | 7 | from cacheme.interfaces import Node 8 | from cacheme.models import sentinel 9 | from cacheme.serializer import Serializer 10 | from cacheme.storages.base import BaseStorage 11 | 12 | 13 | class LocalStorage(BaseStorage): 14 | def __init__(self, size: int, address: str, **options): 15 | policy_name = urlparse(address).netloc 16 | self.cache: Cache = Cache(policy_name, size) 17 | 18 | async def connect(self): 19 | return 20 | 21 | async def get(self, node: Node, serializer: Optional[Serializer]) -> Any: 22 | return self.cache.get(node.full_key(), sentinel) 23 | 24 | def get_sync(self, node: Node, serializer: Optional[Serializer]) -> Any: 25 | return 
self.cache.get(node.full_key(), sentinel) 26 | 27 | async def set( 28 | self, 29 | node: Node, 30 | value: Any, 31 | ttl: Optional[timedelta], 32 | serializer: Optional[Serializer], 33 | ): 34 | self.cache.set(node.full_key(), value, ttl) 35 | 36 | async def remove(self, node: Node): 37 | self.cache.delete(node.full_key()) 38 | 39 | async def get_all( 40 | self, 41 | nodes: Sequence[Node], 42 | serializer: Optional[Serializer], 43 | ) -> Sequence[Tuple[Node, Any]]: 44 | if len(nodes) == 0: 45 | return [] 46 | results = [] 47 | for node in nodes: 48 | v = self.cache.get(node.full_key(), sentinel) 49 | if v != sentinel: 50 | results.append((node, v)) 51 | return results 52 | 53 | def get_all_sync( 54 | self, 55 | nodes: Sequence[Node], 56 | serializer: Optional[Serializer], 57 | ) -> Sequence[Tuple[Node, Any]]: 58 | if len(nodes) == 0: 59 | return [] 60 | results = [] 61 | for node in nodes: 62 | v = self.cache.get(node.full_key(), sentinel) 63 | if v != sentinel: 64 | results.append((node, v)) 65 | return results 66 | 67 | async def set_all( 68 | self, 69 | data: Sequence[Tuple[Node, Any]], 70 | ttl: Optional[timedelta], 71 | serializer: Optional[Serializer], 72 | ): 73 | for node, value in data: 74 | self.cache.set(node.full_key(), value, ttl) 75 | -------------------------------------------------------------------------------- /cacheme/storages/mongo.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta, timezone 2 | from typing import Any, Dict, List, Optional 3 | 4 | import motor.motor_asyncio as mongo 5 | from pymongo import UpdateOne 6 | 7 | from cacheme.storages.base import BaseStorage 8 | 9 | 10 | class MongoStorage(BaseStorage): 11 | def __init__( 12 | self, address: str, database: str, collection: str, pool_size: int = 50 13 | ): 14 | super().__init__(address=address) 15 | self.address = address 16 | self.database = database 17 | self.collection = collection 18 | self.pool_size = 
pool_size 19 | 20 | async def connect(self): 21 | client = mongo.AsyncIOMotorClient(self.address, maxPoolSize=self.pool_size) 22 | self.table = client[self.database][self.collection] 23 | 24 | async def get_by_key(self, key: str) -> Any: 25 | return await self.table.find_one({"key": key}) 26 | 27 | async def set_by_key(self, key: str, value: Any, ttl: Optional[timedelta]): 28 | expire = None 29 | if ttl is not None: 30 | expire = datetime.now(timezone.utc) + ttl 31 | await self.table.update_one( 32 | {"key": key}, 33 | { 34 | "$set": { 35 | "value": value, 36 | "updated_at": datetime.now(timezone.utc), 37 | "expire": expire, 38 | } 39 | }, 40 | True, 41 | ) 42 | 43 | async def remove_by_key(self, key: str): 44 | await self.table.delete_one({"key": key}) 45 | 46 | async def get_by_keys(self, keys: List[str]) -> Dict[str, Any]: 47 | results = await self.table.find({"key": {"$in": keys}}).to_list(None) 48 | return {r["key"]: r for r in results} 49 | 50 | async def set_by_keys(self, data: Dict[str, Any], ttl: Optional[timedelta]): 51 | expire = None 52 | if ttl is not None: 53 | expire = datetime.now(timezone.utc) + ttl 54 | requests = [ 55 | UpdateOne( 56 | {"key": k}, 57 | { 58 | "$set": { 59 | "value": v, 60 | "updated_at": datetime.now(timezone.utc), 61 | "expire": expire, 62 | } 63 | }, 64 | True, 65 | ) 66 | for k, v in data.items() 67 | ] 68 | await self.table.bulk_write(requests) 69 | -------------------------------------------------------------------------------- /cacheme/storages/mysql.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta, timezone 2 | from typing import Any, Dict, List, Optional 3 | from urllib.parse import urlparse 4 | 5 | import aiomysql 6 | 7 | from cacheme.storages.sqldb import SQLStorage 8 | 9 | 10 | class MySQLStorage(SQLStorage): 11 | def __init__(self, address: str, table: str, pool_size: int = 50): 12 | super().__init__(address, table=table) 13 | 
class MySQLStorage(SQLStorage):
    """Cache storage backed by MySQL via an aiomysql connection pool."""

    def __init__(self, address: str, table: str, pool_size: int = 50):
        super().__init__(address, table=table)
        self.pool_size = pool_size
        self.table = table

    async def _connect(self):
        # address looks like mysql://user:password@host:port/dbname
        url = urlparse(self.address)
        db = url.path[1:]  # strip the leading "/" from the URL path
        self.pool = await aiomysql.create_pool(
            host=url.hostname,
            port=url.port or 3306,
            user=url.username,
            password=url.password,
            db=db,
            autocommit=True,
            maxsize=self.pool_size,
        )

    async def close(self):
        """Close the pool and wait for in-flight connections to finish."""
        self.pool.close()
        await self.pool.wait_closed()

    async def execute_ddl(self, ddl):
        async with self.pool.acquire() as conn:
            async with conn.cursor() as cur:
                await cur.execute(ddl)

    async def get_by_key(self, key: str) -> Any:
        """Return the raw row for key, or None when absent."""
        async with self.pool.acquire() as conn:
            async with conn.cursor(aiomysql.DictCursor) as cur:
                await cur.execute(
                    f"select * from {self.table} where `key`=%s",
                    (key,),
                )
                return await cur.fetchone()

    async def set_by_key(self, key: str, value: Any, ttl: Optional[timedelta]):
        expire = None
        if ttl is not None:
            expire = datetime.now(timezone.utc) + ttl
        async with self.pool.acquire() as conn:
            async with conn.cursor() as cur:
                await cur.execute(
                    f"insert into {self.table}(`key`, value, expire) values(%s,%s,%s) ON DUPLICATE KEY UPDATE value=VALUES(value), expire=VALUES(expire)",
                    (
                        key,
                        value,
                        expire,
                    ),
                )

    async def get_by_keys(self, keys: List[str]) -> Dict[str, Any]:
        """Return {key: row} for every key that exists."""
        if not keys:
            # "IN ()" is a MySQL syntax error; short-circuit empty lookups.
            return {}
        async with self.pool.acquire() as conn:
            async with conn.cursor(aiomysql.DictCursor) as cur:
                sql = "SELECT * FROM {} WHERE `key` in ({})".format(
                    self.table, ", ".join("%s" for _ in keys)
                )
                await cur.execute(sql, keys)
                result = await cur.fetchall()
                return {i["key"]: i for i in result}

    async def set_by_keys(self, data: Dict[str, Any], ttl: Optional[timedelta]):
        if not data:
            return
        expire = None
        if ttl is not None:
            expire = datetime.now(timezone.utc) + ttl
        async with self.pool.acquire() as conn:
            async with conn.cursor() as cur:
                await cur.executemany(
                    f"insert into {self.table}(`key`, value, expire) values(%s,%s,%s) ON DUPLICATE KEY UPDATE value=VALUES(value), expire=VALUES(expire)",
                    [
                        (
                            key,
                            value,
                            expire,
                        )
                        for key, value in data.items()
                    ],
                )

    async def remove_by_key(self, key: str):
        async with self.pool.acquire() as conn:
            async with conn.cursor(aiomysql.DictCursor) as cur:
                await cur.execute(
                    f"delete from {self.table} where `key`=%s",
                    (key,),
                )
class PostgresStorage(SQLStorage):
    """Cache storage backed by PostgreSQL via an asyncpg connection pool."""

    def __init__(self, address: str, table: str, pool_size: int = 50):
        super().__init__(address, table=table)
        self.pool_size = pool_size
        self.pool = None
        self.table = table

    async def _connect(self):
        self.pool = await asyncpg.create_pool(dsn=self.address, max_size=self.pool_size)

    async def close(self):
        await cast(Pool, self.pool).close()

    def _require_pool(self) -> "Pool":
        # The previous code used a bare ``raise`` here, which (with no
        # active exception) only produces an opaque
        # "RuntimeError: No active exception to re-raise".  Raise an
        # explicit RuntimeError (same exception type) so misuse is
        # diagnosable.
        if self.pool is None:
            raise RuntimeError("storage is not connected, call connect() first")
        return self.pool

    async def execute_ddl(self, ddl):
        async with self._require_pool().acquire() as conn:
            await conn.execute(ddl)

    async def get_by_key(self, key: str) -> Any:
        """Return the raw record for key, or None when absent."""
        async with self._require_pool().acquire() as conn:
            return await conn.fetchrow(f"select * from {self.table} where key=$1", key)

    async def set_by_key(self, key: str, value: Any, ttl: Optional[timedelta]):
        expire = None
        if ttl is not None:
            expire = datetime.now(timezone.utc) + ttl
        async with self._require_pool().acquire() as conn:
            await conn.execute(
                f"insert into {self.table}(key, value, expire) values($1,$2,$3) on conflict(key) do update set value=EXCLUDED.value, expire=EXCLUDED.expire",
                key,
                value,
                expire,
            )

    async def get_by_keys(self, keys: List[str]) -> Dict[str, Any]:
        """Return {key: record}; ``= any($1)`` handles an empty list safely."""
        async with self._require_pool().acquire() as conn:
            records = await conn.fetch(
                f"select * from {self.table} where key=any($1::text[])", keys
            )
            return {r["key"]: r for r in records}

    async def set_by_keys(self, data: Dict[str, Any], ttl: Optional[timedelta]):
        expire = None
        if ttl is not None:
            expire = datetime.now(timezone.utc) + ttl
        async with self._require_pool().acquire() as conn:
            await conn.executemany(
                f"insert into {self.table}(key, value, expire) values($1,$2,$3) on conflict(key) do update set value=EXCLUDED.value, expire=EXCLUDED.expire",
                [
                    (
                        key,
                        value,
                        expire,
                    )
                    for key, value in data.items()
                ],
            )

    async def remove_by_key(self, key: str):
        async with self._require_pool().acquire() as conn:
            return await conn.execute(f"delete from {self.table} where key=$1", key)
class RedisStorage(BaseStorage):
    """Cache storage backed by Redis (standalone or cluster) via redis-py asyncio.

    Unlike the SQL/Mongo storages, TTL is enforced server-side with
    SETEX, so no ``expire`` column is stored alongside the value.
    """

    # Either a plain client or a cluster client, chosen in connect().
    client: Union[redis.Redis, redis_cluster.RedisCluster]

    def __init__(
        self, address: str, pool_size: int = 100, cluster: bool = False, **options
    ):
        super().__init__(address=address)
        self.pool_size = pool_size
        self.cluster = cluster
        # Extra keyword args forwarded verbatim to from_url().
        self.options = options

    async def connect(self):
        """Create the client; in standalone mode swap in a blocking pool.

        The BlockingConnectionPool (timeout=None) makes callers wait for a
        free connection instead of erroring out when the pool is exhausted.
        """
        if self.cluster:
            self.client = redis_cluster.RedisCluster.from_url(
                self.address,
                max_connections=10 * self.pool_size,
                **self.options,
            )
        else:
            self.client = await redis.from_url(self.address, **self.options)
            cast(
                redis.Redis, self.client
            ).connection_pool = BlockingConnectionPool.from_url(
                self.address, max_connections=self.pool_size, timeout=None
            )

    async def get_by_key(self, key: str) -> Any:
        return await self.client.get(key)  # type: ignore

    async def get_by_keys(self, keys: List[str]) -> Dict[str, Any]:
        # MGET preserves input order; drop keys that were missing (None).
        values = await self.client.mget(keys)  # type: ignore
        return {keys[i]: v for i, v in enumerate(values) if v is not None}

    def serialize(self, raw: Any, serializer: Optional[Serializer]) -> CachedData:
        """Decode raw bytes from Redis into CachedData.

        expire is always None here because Redis expires keys itself
        (see set_by_key using SETEX).
        """
        if serializer is None:
            raise Exception("serializer is None")
        data = serializer.loads(cast(bytes, raw))
        return CachedData(data=data["value"], expire=None)

    def deserialize(self, raw: Any, serializer: Optional[Serializer]) -> Any:
        # Wrap the value in the same {"value", "updated_at"} envelope the
        # other storages persist, then defer to the base implementation.
        value = {"value": raw, "updated_at": datetime.now(timezone.utc)}
        return super().deserialize(value, serializer)

    async def remove_by_key(self, key: str):
        await self.client.delete(key)  # type: ignore

    async def set_by_key(self, key: str, value: Any, ttl: Optional[timedelta]):
        # SETEX for TTL'd entries so Redis handles expiry natively.
        if ttl is not None:
            await self.client.setex(key, int(ttl.total_seconds()), value)  # type: ignore
        else:
            await self.client.set(key, value)  # type: ignore

    async def set_by_keys(self, data: Dict[str, Any], ttl: Optional[timedelta]):
        # Batch all writes into one pipeline round trip.
        async with self.client.pipeline() as pipe:
            if ttl is not None:
                seconds = int(ttl.total_seconds())
                for k, v in data.items():
                    pipe.setex(k, seconds, v)  # type: ignore
            else:
                for k, v in data.items():
                    pipe.set(k, v)  # type: ignore
            await pipe.execute()  # type: ignore
class SQLStorage(BaseStorage):
    """Shared base for the SQL-backed storages (MySQL/Postgres/SQLite).

    Validates the table name once at construction time: every subclass
    interpolates ``self.table`` directly into SQL f-strings, so the name
    must be a plain identifier.
    """

    def __init__(self, address: str, table: str):
        # Restrict to word characters only.  The previous pattern
        # (r".\w+") allowed an arbitrary FIRST character — e.g. ";" or a
        # quote — which is an SQL-injection vector given the f-string
        # interpolation in the subclasses.
        match = re.fullmatch(r"\w+", table)
        if match is None:
            raise Exception("invalid table name")
        self.address = address
        self.table = table
        super().__init__(address=address, table=table)

    async def _connect(self):
        # Subclasses establish their own pools/connections here.
        raise NotImplementedError()

    async def connect(self):
        await self._connect()

    async def execute_ddl(self, ddl):
        raise NotImplementedError()
class SQLiteStorage(SQLStorage):
    """Cache storage backed by a local SQLite database file.

    Writes go through a single dedicated connection (``self.writer``,
    WAL journal mode) while reads run in worker threads on a small pool
    of reader connections, bounded by ``self.sem``.
    """

    def __init__(self, address: str, table: str, pool_size: int = 10):
        super().__init__(address, table=table)
        url = urlparse(self.address)
        db = url.path[1:]  # strip leading "/" -> on-disk filename
        self.db = db
        # Caps the number of concurrent reader threads/connections.
        self.sem = asyncio.BoundedSemaphore(pool_size)
        self.pool: List[sqlite3.Connection] = []
        self.table = table

    async def _connect(self):
        conn = sqlite3.connect(
            self.db, isolation_level=None, timeout=30, check_same_thread=False
        )
        conn.row_factory = sqlite3.Row
        # WAL lets the reader connections proceed while the writer commits.
        cur = conn.execute("pragma journal_mode=wal")
        cur.close()
        self.writer = conn

    async def execute_ddl(self, ddl):
        with sqlite3.connect(self.db, isolation_level=None) as conn:
            conn.execute(ddl)

    def serialize(self, raw: Any, serializer: Optional[Serializer]) -> CachedData:
        """Turn a fetched row into CachedData, decoding value if needed."""
        data = raw["value"]
        if serializer is not None:
            data = serializer.loads(cast(bytes, raw["value"]))
        # Parsed only to validate the stored timestamp format.
        updated_at = datetime.fromisoformat(raw["updated_at"])
        expire = None
        # SQLite stores naive ISO timestamps; they were written as UTC,
        # so re-attach the UTC tzinfo on the way out.
        if raw["expire"] is not None:
            expire = datetime.fromisoformat(raw["expire"]).replace(tzinfo=timezone.utc)
        return CachedData(
            data=data,
            expire=expire,
        )

    def get_connection(self) -> sqlite3.Connection:
        """Pop a pooled reader connection, or open a new one."""
        if len(self.pool) > 0:
            return self.pool.pop(0)
        conn = sqlite3.connect(
            self.db, isolation_level=None, timeout=30, check_same_thread=False
        )
        conn.row_factory = sqlite3.Row
        return conn

    def sync_get_by_key(
        self,
        key: str,
    ) -> Any:
        # Runs in a worker thread; always return the connection to the
        # pool, even if the query raises.
        conn = self.get_connection()
        try:
            cur = conn.execute(
                f"select * from {self.table} where key=?",
                (key,),
            )
            data = cur.fetchone()
            cur.close()
        finally:
            self.pool.append(conn)
        return data

    def sync_remove_by_key(self, key: str):
        cur = self.writer.execute(
            f"delete from {self.table} where key=?",
            (key,),
        )
        cur.close()

    def sync_get_by_keys(
        self,
        keys: List[str],
    ) -> Dict[str, Any]:
        # Note: unlike MySQL, SQLite accepts an empty "IN ()" list.
        conn = self.get_connection()
        try:
            sql = (
                f"SELECT * FROM {self.table} WHERE key in ({', '.join('?' for _ in keys)})"
            )
            cur = conn.execute(sql, keys)
            data = cur.fetchall()
            cur.close()
        finally:
            self.pool.append(conn)
        return {i["key"]: i for i in data}

    def sync_set_data(
        self,
        key: str,
        value: Any,
        expire: Optional[datetime],
    ):
        cur = self.writer.execute(
            f"insert into {self.table}(key, value, expire) values(?,?,?) on conflict(key) do update set value=EXCLUDED.value, expire=EXCLUDED.expire",
            (
                key,
                value,
                expire,
            ),
        )
        cur.close()

    def sync_set_data_batch(
        self,
        data: Dict[str, Any],
        expire: Optional[datetime],
    ):
        cur = self.writer.executemany(
            f"insert into {self.table}(key, value, expire) values(?,?,?) on conflict(key) do update set value=EXCLUDED.value, expire=EXCLUDED.expire",
            [
                (
                    key,
                    value,
                    expire,
                )
                for key, value in data.items()
            ],
        )
        cur.close()

    async def get_by_key(self, key: str) -> Any:
        await self.sem.acquire()
        try:
            if sys.version_info >= (3, 9):
                data = await asyncio.to_thread(self.sync_get_by_key, key)
            else:
                loop = asyncio.get_running_loop()
                data = await loop.run_in_executor(None, self.sync_get_by_key, key)
        finally:
            # Release even when the worker thread raised, otherwise the
            # semaphore leaks a permit and reads eventually deadlock.
            self.sem.release()
        return data

    async def set_by_key(self, key: str, value: Any, ttl: Optional[timedelta]):
        # Writes go through the single writer connection on the event
        # loop thread; SQLite writes are expected to be fast here.
        expire = None
        if ttl is not None:
            expire = datetime.now(timezone.utc) + ttl
        self.sync_set_data(key, value, expire)

    async def get_by_keys(self, keys: List[str]) -> Dict[str, Any]:
        await self.sem.acquire()
        try:
            if sys.version_info >= (3, 9):
                data = await asyncio.to_thread(self.sync_get_by_keys, keys)
            else:
                loop = asyncio.get_running_loop()
                data = await loop.run_in_executor(None, self.sync_get_by_keys, keys)
        finally:
            self.sem.release()
        return data

    async def set_by_keys(self, data: Dict[str, Any], ttl: Optional[timedelta]):
        expire = None
        if ttl is not None:
            expire = datetime.now(timezone.utc) + ttl
        self.sync_set_data_batch(data, expire)

    async def remove_by_key(self, key: str):
        self.sync_remove_by_key(key)
-------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yiling-J/cacheme/f402c45267ad107a647395a74e67e53760c13755/tests/__init__.py -------------------------------------------------------------------------------- /tests/__main__.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | testsuite = unittest.TestLoader().discover('.') 4 | unittest.TextTestRunner().run(testsuite) 5 | -------------------------------------------------------------------------------- /tests/test_core.py: -------------------------------------------------------------------------------- 1 | from asyncio import gather 2 | from dataclasses import dataclass 3 | from datetime import timedelta 4 | from unittest.mock import Mock 5 | 6 | import pytest 7 | 8 | from cacheme.core import ( 9 | Memoize, 10 | build_node, 11 | get, 12 | get_all, 13 | invalidate, 14 | nodes, 15 | refresh, 16 | stats, 17 | _awaits_len, 18 | ) 19 | from cacheme.data import register_storage 20 | from cacheme.models import Cache, DynamicNode, Node, sentinel, set_prefix 21 | from cacheme.serializer import MsgPackSerializer 22 | from cacheme.storages import Storage 23 | 24 | 25 | def node_cls(mock: Mock): 26 | @dataclass 27 | class FooNode(Node): 28 | user_id: str 29 | foo_id: str 30 | level: int 31 | 32 | def key(self) -> str: 33 | return f"{self.user_id}:{self.foo_id}:{self.level}" 34 | 35 | async def load(self) -> str: 36 | mock() 37 | return f"{self.user_id}-{self.foo_id}-{self.level}" 38 | 39 | class Meta(Node.Meta): 40 | version = "v1" 41 | caches = [Cache(storage="local", ttl=None)] 42 | serializer = MsgPackSerializer() 43 | 44 | return FooNode 45 | 46 | 47 | async def fn(a: int, b: str, m: Mock) -> str: 48 | m() 49 | return f"{a}/{b}/apple" 50 | 51 | 52 | @pytest.mark.asyncio 53 | async def test_memoize(): 54 
| await register_storage("local", Storage(url="local://tlfu", size=50)) 55 | mock = Mock() 56 | FooNode = node_cls(mock) 57 | test_fn = Memoize(Node)(fn) 58 | test_fn.to_node(lambda a, b, m: FooNode(user_id=str(a), foo_id=b, level=10)) 59 | assert mock.call_count == 0 60 | result = await test_fn(1, "2", mock) 61 | assert result == "1/2/apple" 62 | assert mock.call_count == 1 63 | result = await test_fn(1, "2", mock) 64 | assert result == "1/2/apple" 65 | assert mock.call_count == 1 66 | 67 | class Bar: 68 | @Memoize(FooNode) 69 | async def fn(self, a: int, b: str, c: int, m: Mock) -> str: 70 | m() 71 | return f"{a}/{b}/{c}/orange" 72 | 73 | @fn.to_node 74 | def _(self, a: int, b: str, c: int, m: Mock) -> Node: 75 | return FooNode(user_id=str(a), foo_id=b, level=20) 76 | 77 | mock.reset_mock() 78 | b = Bar() 79 | assert mock.call_count == 0 80 | result = await b.fn(1, "2", 3, mock) 81 | assert result == "1/2/3/orange" 82 | assert mock.call_count == 1 83 | result = await b.fn(1, "2", 3, mock) 84 | assert result == "1/2/3/orange" 85 | assert mock.call_count == 1 86 | result = await b.fn(1, "2", 5, mock) 87 | assert result == "1/2/3/orange" 88 | assert mock.call_count == 1 89 | 90 | 91 | @pytest.mark.asyncio 92 | async def test_get(): 93 | await register_storage("local", Storage(url="local://tlfu", size=50)) 94 | mock = Mock() 95 | Node = node_cls(mock) 96 | result = await get(Node(user_id="a", foo_id="1", level=10)) 97 | assert mock.call_count == 1 98 | assert result == "a-1-10" 99 | result = await get(Node(user_id="a", foo_id="1", level=10)) 100 | assert mock.call_count == 1 101 | assert result == "a-1-10" 102 | 103 | 104 | @pytest.mark.asyncio 105 | async def test_get_override(): 106 | await register_storage("local", Storage(url="local://tlfu", size=50)) 107 | mock = Mock() 108 | FooNode = node_cls(mock) 109 | mock2 = Mock() 110 | 111 | async def override(node: Node) -> str: 112 | mock2() 113 | return f"{node.user_id}-{node.foo_id}-{node.level}-o" # type: ignore 114 
| 115 | result = await get(FooNode(user_id="a", foo_id="1", level=10), override) 116 | assert mock.call_count == 0 117 | assert mock2.call_count == 1 118 | assert result == "a-1-10-o" 119 | result = await get(FooNode(user_id="a", foo_id="1", level=10), override) 120 | assert mock.call_count == 0 121 | assert mock2.call_count == 1 122 | assert result == "a-1-10-o" 123 | 124 | 125 | @pytest.mark.asyncio 126 | async def test_get_all(): 127 | await register_storage("local", Storage(url="local://tlfu", size=50)) 128 | mock = Mock() 129 | Node = node_cls(mock) 130 | nodes = [ 131 | Node(user_id="c", foo_id="2", level=1), 132 | Node(user_id="a", foo_id="1", level=1), 133 | Node(user_id="b", foo_id="3", level=1), 134 | ] 135 | results = await get_all(nodes) 136 | assert mock.call_count == 3 137 | assert results == ["c-2-1", "a-1-1", "b-3-1"] 138 | 139 | results = await get_all(nodes) 140 | assert mock.call_count == 3 141 | assert results == ["c-2-1", "a-1-1", "b-3-1"] 142 | nodes = [ 143 | Node(user_id="c", foo_id="2", level=1), 144 | Node(user_id="a", foo_id="1", level=1), 145 | Node(user_id="b", foo_id="4", level=1), 146 | ] 147 | results = await get_all(nodes) 148 | assert mock.call_count == 4 149 | assert results == ["c-2-1", "a-1-1", "b-4-1"] 150 | 151 | 152 | @pytest.mark.asyncio 153 | async def test_memoize_concurrency(): 154 | await register_storage("local", Storage(url="local://tlfu", size=50)) 155 | mock = Mock() 156 | Node = node_cls(mock) 157 | test_fn = Memoize(Node)(fn) 158 | test_fn.to_node(lambda a, b, m: Node(user_id=str(a), foo_id=b, level=10)) 159 | results = await gather(*[test_fn(a=1, b="2", m=mock) for _ in range(50)]) 160 | assert len(results) == 50 161 | for r in results: 162 | assert r == "1/2/apple" 163 | assert mock.call_count == 1 164 | assert _awaits_len() == 0 165 | 166 | 167 | @pytest.mark.asyncio 168 | async def test_get_concurrency(): 169 | await register_storage("local", Storage(url="local://tlfu", size=50)) 170 | mock = Mock() 171 | Node 
= node_cls(mock) 172 | results = await gather( 173 | *[get(Node(user_id="b", foo_id="a", level=10)) for _ in range(200)] 174 | ) 175 | assert len(results) == 200 176 | for r in results: 177 | assert r == "b-a-10" 178 | assert mock.call_count == 1 179 | assert _awaits_len() == 0 180 | 181 | 182 | @pytest.mark.asyncio 183 | async def test_get_all_concurrency(): 184 | await register_storage("local", Storage(url="local://tlfu", size=50)) 185 | mock = Mock() 186 | Node = node_cls(mock) 187 | nodes = [ 188 | Node(user_id="1", foo_id="2", level=10), 189 | Node(user_id="2", foo_id="2", level=10), 190 | Node(user_id="3", foo_id="2", level=10), 191 | ] 192 | results = await gather(*[get_all(nodes) for _ in range(200)]) 193 | assert len(results) == 200 194 | for r in results: 195 | assert r == ["1-2-10", "2-2-10", "3-2-10"] 196 | assert mock.call_count == 3 197 | assert _awaits_len() == 0 198 | 199 | 200 | @dataclass 201 | class StatsNode(Node): 202 | id: str 203 | 204 | def key(self) -> str: 205 | return f"{self.id}" 206 | 207 | async def load(self) -> str: 208 | return f"{self.id}" 209 | 210 | class Meta(Node.Meta): 211 | version = "v1" 212 | caches = [Cache(storage="local", ttl=None)] 213 | 214 | 215 | @pytest.mark.asyncio 216 | async def test_stats(): 217 | await register_storage("local", Storage(url="local://lru", size=100)) 218 | await get(StatsNode("a")) 219 | await get(StatsNode("b")) 220 | await get(StatsNode("c")) 221 | await get(StatsNode("a")) 222 | await get(StatsNode("d")) 223 | metrics = stats(StatsNode) 224 | assert metrics.request_count() == 5 225 | assert metrics.hit_count() == 1 226 | assert metrics.load_count() == 4 227 | assert metrics.hit_rate() == 1 / 5 228 | assert metrics.load_success_count() == 4 229 | assert metrics.miss_count() == 4 230 | assert metrics.miss_rate() == 4 / 5 231 | await get_all([StatsNode("a"), StatsNode("b"), StatsNode("f")]) 232 | assert metrics.request_count() == 8 233 | assert metrics.hit_count() == 3 234 | assert 
metrics.load_count() == 5 235 | 236 | 237 | @pytest.mark.asyncio 238 | async def test_invalidate(): 239 | await register_storage("local", Storage(url="local://tlfu", size=50)) 240 | mock = Mock() 241 | Node = node_cls(mock) 242 | await get(Node(user_id="a", foo_id="1", level=10)) 243 | await get(Node(user_id="a", foo_id="1", level=10)) 244 | assert mock.call_count == 1 245 | await invalidate(Node(user_id="a", foo_id="1", level=10)) 246 | assert mock.call_count == 1 247 | await get(Node(user_id="a", foo_id="1", level=10)) 248 | assert mock.call_count == 2 249 | 250 | 251 | @pytest.mark.asyncio 252 | async def test_refresh(): 253 | await register_storage("local", Storage(url="local://tlfu", size=50)) 254 | mock = Mock() 255 | Node = node_cls(mock) 256 | await get(Node(user_id="a", foo_id="1", level=10)) 257 | await get(Node(user_id="a", foo_id="1", level=10)) 258 | assert mock.call_count == 1 259 | await refresh(Node(user_id="a", foo_id="1", level=10)) 260 | assert mock.call_count == 2 261 | await get(Node(user_id="a", foo_id="1", level=10)) 262 | assert mock.call_count == 2 263 | 264 | 265 | def node_multi_cls(mock: Mock): 266 | @dataclass 267 | class FooNode(Node): 268 | id: str 269 | 270 | def key(self) -> str: 271 | return f"{self.id}" 272 | 273 | async def load(self) -> str: 274 | mock() 275 | return "test" 276 | 277 | class Meta(Node.Meta): 278 | version = "v1" 279 | caches = [ 280 | Cache(storage="local1", ttl=timedelta(seconds=10)), 281 | Cache(storage="local2", ttl=None), 282 | ] 283 | serializer = MsgPackSerializer() 284 | 285 | return FooNode 286 | 287 | 288 | @pytest.mark.asyncio 289 | async def test_multiple_storage(): 290 | storage1 = Storage(url="local://tlfu", size=50) 291 | storage2 = Storage(url="local://tlfu", size=50) 292 | await register_storage("local1", storage1) 293 | await register_storage("local2", storage2) 294 | mock = Mock() 295 | Node = node_multi_cls(mock) 296 | node = Node(id="1") 297 | result = await get(node) 298 | assert result == 
"test" 299 | assert mock.call_count == 1 300 | r1 = await storage1.get(node, None) 301 | assert r1 == "test" 302 | r2 = await storage2.get(node, None) 303 | assert r2 == "test" 304 | # invalidate node 305 | await invalidate(node) 306 | r1 = await storage1.get(node, None) 307 | assert r1 is sentinel 308 | r2 = await storage2.get(node, None) 309 | assert r2 is sentinel 310 | 311 | # test remove cache from local only 312 | result = await get(node) 313 | assert result == "test" 314 | assert mock.call_count == 2 315 | await storage1.remove(node) 316 | result = await get(node) 317 | assert result == "test" 318 | r1 = await storage1.get(node, None) 319 | assert r1 == "test" 320 | r2 = await storage2.get(node, None) 321 | assert r2 == "test" 322 | assert mock.call_count == 2 323 | 324 | 325 | def test_nodes(): 326 | test_nodes = nodes() 327 | assert len(test_nodes) > 0 328 | for n in test_nodes: 329 | assert type(n) != Node 330 | 331 | 332 | def test_set_prefix(): 333 | set_prefix("youcache") 334 | mock = Mock() 335 | Node = node_multi_cls(mock) 336 | node = Node(id="test") 337 | assert node.full_key() == "youcache:test:v1" 338 | 339 | 340 | @pytest.mark.asyncio 341 | async def test_build_node(): 342 | await register_storage("local", Storage(url="local://tlfu", size=50)) 343 | Node = build_node("DynamicFooNode", "v1", [Cache(storage="local", ttl=None)]) 344 | c = 0 345 | 346 | async def counter(node) -> int: 347 | nonlocal c 348 | c += 1 349 | return c 350 | 351 | for i in range(0, 10): 352 | result = await get(Node(key=f"foo:{i}"), load_fn=counter) 353 | assert result == i + 1 354 | assert c == 10 355 | for i in range(0, 10): 356 | result = await get(Node(key=f"foo:{i}"), load_fn=counter) 357 | assert result == i + 1 358 | assert c == 10 359 | 360 | # assert nodes/stats API 361 | assert Node in nodes() 362 | metrics = stats(Node) 363 | assert metrics.request_count() == 20 364 | assert metrics.hit_count() == 10 365 | 366 | # build with same name, should use existing one 
367 | Node2 = build_node("DynamicFooNode", "v1", [Cache(storage="local", ttl=None)]) 368 | assert Node == Node2 369 | 370 | 371 | fn_dynamic_counter = 0 372 | 373 | 374 | @Memoize(build_node("DynamicBarNode", "v1", [Cache(storage="local", ttl=None)])) 375 | async def fn_dynamic(a: int) -> int: 376 | global fn_dynamic_counter 377 | fn_dynamic_counter += 1 378 | return a 379 | 380 | 381 | @fn_dynamic.to_node 382 | def _(a: int) -> DynamicNode: 383 | return DynamicNode(key=f"bar:{a}") 384 | 385 | 386 | @pytest.mark.asyncio 387 | async def test_build_node_decorator(): 388 | await register_storage("local", Storage(url="local://tlfu", size=50)) 389 | assert fn_dynamic_counter == 0 390 | result = await fn_dynamic(1) 391 | assert result == 1 392 | assert fn_dynamic_counter == 1 393 | result = await fn_dynamic(1) 394 | assert result == 1 395 | assert fn_dynamic_counter == 1 396 | result = await fn_dynamic(2) 397 | assert result == 2 398 | assert fn_dynamic_counter == 2 399 | -------------------------------------------------------------------------------- /tests/test_serializers.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from datetime import datetime, timedelta 3 | 4 | import pytest 5 | from pydantic import BaseModel 6 | 7 | from cacheme.serializer import * 8 | 9 | TUPLE_TO_LIST = 1 10 | JSON_ONLY = 2 11 | PICKLE = 3 12 | JSON = 4 13 | MSGPACK = 5 14 | 15 | 16 | class Foo: 17 | a = 1 18 | b = 2 19 | 20 | def __init__(self, q): 21 | self.c = q 22 | 23 | def __eq__(self, other): 24 | return hash(self) == hash(other) 25 | 26 | def __hash__(self): 27 | return hash(f"{self.a}:{self.b}:{self.c}") 28 | 29 | 30 | @dataclass 31 | class Bar: 32 | a: int 33 | b: str 34 | 35 | 36 | class FooBar(BaseModel): 37 | id: int 38 | name = "foo bar" 39 | 40 | 41 | @pytest.mark.parametrize( 42 | "data", 43 | [ 44 | {"d": None, "s": [PICKLE, JSON, MSGPACK]}, 45 | {"d": True, "s": [PICKLE, JSON, MSGPACK]}, 46 | 
{"d": False, "s": [PICKLE, JSON, MSGPACK]}, 47 | {"d": [], "s": [PICKLE, JSON, MSGPACK]}, 48 | {"d": {}, "s": [PICKLE, JSON, MSGPACK]}, 49 | {"d": (), "s": [PICKLE, JSON, MSGPACK]}, 50 | {"d": 1, "s": [PICKLE, JSON, MSGPACK]}, 51 | {"d": 1.23, "s": [PICKLE, JSON, MSGPACK]}, 52 | {"d": "foo", "s": [PICKLE, JSON, MSGPACK]}, 53 | {"d": [1, 2, 3], "s": [PICKLE, JSON, MSGPACK]}, 54 | {"d": (1, 2, 3), "s": [PICKLE, JSON, MSGPACK]}, 55 | {"d": datetime.now(), "s": [PICKLE, MSGPACK]}, 56 | {"d": timedelta(seconds=20), "s": [PICKLE, MSGPACK]}, 57 | { 58 | "d": { 59 | "a": "a", 60 | "b": 2, 61 | "ll": [1, 2, "3", {"a": "b"}], 62 | }, 63 | "s": [PICKLE, JSON, MSGPACK], 64 | }, 65 | {"d": Foo(10), "s": [PICKLE]}, 66 | {"d": Bar(a=1, b="12"), "s": [PICKLE, JSON, MSGPACK]}, 67 | {"d": FooBar(id=12), "s": [PICKLE, JSON, MSGPACK]}, 68 | ], 69 | ) 70 | @pytest.mark.parametrize( 71 | "serializer_data", 72 | [ 73 | { 74 | "n": PICKLE, 75 | "s": [PickleSerializer(), CompressedPickleSerializer()], 76 | "tags": [], 77 | }, 78 | { 79 | "n": MSGPACK, 80 | "s": [MsgPackSerializer(), CompressedMsgPackSerializer()], 81 | "tags": [TUPLE_TO_LIST], 82 | }, 83 | { 84 | "n": JSON, 85 | "s": [JSONSerializer(), CompressedJSONSerializer()], 86 | "tags": [TUPLE_TO_LIST], 87 | }, 88 | ], 89 | ) 90 | def test_serializers(data, serializer_data): 91 | if serializer_data["n"] not in data["s"]: 92 | return 93 | value = data["d"] 94 | for serializer in serializer_data["s"]: 95 | deserialized = serializer.dumps(value) 96 | serialized = serializer.loads(deserialized) 97 | if TUPLE_TO_LIST in serializer_data["tags"] and isinstance(value, tuple): 98 | value = list(value) 99 | assert serialized == value 100 | -------------------------------------------------------------------------------- /tests/test_storages.py: -------------------------------------------------------------------------------- 1 | import os 2 | import random 3 | from asyncio import sleep 4 | from dataclasses import dataclass 5 | from datetime 
import timedelta 6 | from typing import List 7 | 8 | import pytest 9 | 10 | from cacheme.models import Node, sentinel 11 | from cacheme.serializer import PickleSerializer 12 | from cacheme.storages.local import LocalStorage 13 | from cacheme.storages.mongo import MongoStorage 14 | from cacheme.storages.mysql import MySQLStorage 15 | from cacheme.storages.postgres import PostgresStorage 16 | from cacheme.storages.redis import RedisStorage 17 | from cacheme.storages.sqlite import SQLiteStorage 18 | from tests.utils import setup_storage 19 | 20 | 21 | @dataclass 22 | class FooNode(Node): 23 | id: str 24 | 25 | def key(self) -> str: 26 | return f"{self.id}" 27 | 28 | class Meta(Node.Meta): 29 | version = "v1" 30 | storage = "local" 31 | 32 | 33 | @pytest.mark.parametrize( 34 | "storage", 35 | [ 36 | {"s": LocalStorage(200, "local://tlfu"), "local": True}, 37 | { 38 | "s": SQLiteStorage( 39 | f"sqlite:///test{random.randint(0, 50000)}", 40 | table="data", 41 | ), 42 | "local": True, 43 | }, 44 | { 45 | "s": MySQLStorage( 46 | "mysql://username:password@localhost:3306/test", 47 | table="data", 48 | ), 49 | "local": False, 50 | }, 51 | { 52 | "s": PostgresStorage( 53 | f"postgresql://username:password@127.0.0.1:5432/test", 54 | table="data", 55 | ), 56 | "local": False, 57 | }, 58 | { 59 | "s": RedisStorage( 60 | "redis://localhost:6379", 61 | ), 62 | "local": False, 63 | }, 64 | { 65 | "s": MongoStorage( 66 | "mongodb://test:password@localhost:27017", 67 | database="test", 68 | collection="data", 69 | ), 70 | "local": False, 71 | }, 72 | ], 73 | ) 74 | @pytest.mark.asyncio 75 | async def test_storages(storage): 76 | if storage["local"] is False and os.environ.get("CI") != "TRUE": 77 | return 78 | s = storage["s"] 79 | filename = "" 80 | if isinstance(s, SQLiteStorage): 81 | filename = s.address.split("///")[-1] 82 | await s.connect() 83 | await setup_storage(s) 84 | node = FooNode(id="foo") 85 | await s.set( 86 | node=node, 87 | value={"foo": "bar"}, 88 | 
ttl=timedelta(days=10), 89 | serializer=PickleSerializer(), 90 | ) 91 | result = await s.get(node, serializer=PickleSerializer()) 92 | assert result is not None 93 | assert result == {"foo": "bar"} 94 | 95 | # expire test 96 | node = FooNode(id="foo_expire") 97 | await s.set( 98 | node=node, 99 | value={"foo": "bar"}, 100 | ttl=timedelta(seconds=1), 101 | serializer=PickleSerializer(), 102 | ) 103 | await sleep(2) 104 | result = await s.get(node, serializer=PickleSerializer()) 105 | assert result == sentinel 106 | 107 | # get/set all 108 | nodes: List[Node] = [] 109 | result = await s.get_all(nodes, PickleSerializer()) 110 | assert result == [] 111 | data = [] 112 | for i in [3, 1, 2]: 113 | node = FooNode(id=f"foo-{i}") 114 | nodes.append(node) 115 | data.append((node, f"bar-{i}")) 116 | nodes.append(FooNode(id=f"foo-foo")) 117 | await s.set_all(data, ttl=timedelta(seconds=1), serializer=PickleSerializer()) 118 | result = await s.get_all(nodes, PickleSerializer()) 119 | assert len(result) == 3 120 | assert {r[0].key() for r in result} == {"foo-3", "foo-1", "foo-2"} 121 | assert {r[1] for r in result} == { 122 | "bar-3", 123 | "bar-1", 124 | "bar-2", 125 | } 126 | 127 | # invalidate 128 | node = FooNode(id="invalidate") 129 | await s.set( 130 | node=node, 131 | value={"foo": "bar"}, 132 | ttl=timedelta(days=10), 133 | serializer=PickleSerializer(), 134 | ) 135 | result = await s.get(node, serializer=PickleSerializer()) 136 | assert result != sentinel 137 | await s.remove(node) 138 | result = await s.get(node, serializer=PickleSerializer()) 139 | assert result == sentinel 140 | 141 | if filename != "": 142 | os.remove(filename) 143 | os.remove(f"{filename}-shm") 144 | os.remove(f"{filename}-wal") 145 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from urllib.parse import urlparse 3 | 4 | import 
motor.motor_asyncio as mongo 5 | 6 | from cacheme.storages.mongo import MongoStorage 7 | from cacheme.storages.sqldb import SQLStorage 8 | 9 | 10 | async def setup_storage(storage: Any): 11 | if isinstance(storage, SQLStorage): 12 | url = urlparse(storage.address) 13 | with open(f"cacheme/storages/scripts/{url.scheme}.sql", "r") as f: 14 | sql = f.read() 15 | ddls = sql.split(";") 16 | for ddl in ddls: 17 | if ddl.strip() == "": 18 | continue 19 | ddl = ddl.replace("cacheme_data", storage.table) 20 | await storage.execute_ddl(ddl) 21 | 22 | if isinstance(storage, MongoStorage): 23 | client = mongo.AsyncIOMotorClient(storage.address) 24 | table = client[storage.database][storage.collection] 25 | await table.create_index("key", unique=True) 26 | await table.create_index("expire") 27 | --------------------------------------------------------------------------------