├── .github └── workflows │ ├── pypi-publish.yaml │ └── tests.yaml ├── LICENSE ├── README.md ├── litedict.py ├── setup.py └── tests.ipynb /.github/workflows/pypi-publish.yaml: -------------------------------------------------------------------------------- 1 | name: Publish Python distribution to PyPI 2 | on: [push] 3 | jobs: 4 | build-n-publish: 5 | name: Build and publish Python distributions to PyPI 6 | runs-on: ubuntu-18.04 7 | steps: 8 | - uses: actions/checkout@v2 9 | - name: Set up Python 3.7 10 | uses: actions/setup-python@v2 11 | with: 12 | python-version: 3.7 13 | - name: Install pypa/build 14 | run: >- 15 | python -m 16 | pip install 17 | build 18 | --user 19 | - name: Build a binary wheel and a source tarball 20 | run: >- 21 | python -m 22 | build 23 | --sdist 24 | --wheel 25 | --outdir dist/ 26 | . 27 | - name: Publish distribution to PyPI 28 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 29 | uses: pypa/gh-action-pypi-publish@master 30 | with: 31 | password: ${{ secrets.GH_PYPI_TOKEN }} 32 | -------------------------------------------------------------------------------- /.github/workflows/tests.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | on: [push] 3 | jobs: 4 | tests: 5 | name: ${{ matrix.python-version }} 6 | runs-on: ubuntu-latest 7 | strategy: 8 | fail-fast: false 9 | matrix: 10 | python-version: [3.7, 3.8, 3.9, "3.10"] 11 | steps: 12 | - uses: actions/checkout@v2 13 | - uses: actions/setup-python@v2 14 | with: 15 | python-version: ${{ matrix.python-version }} 16 | - name: pip deps 17 | run: | 18 | pip install jupyterlab nbconvert 19 | - name: runtests 20 | run: jupyter nbconvert --execute tests.ipynb --to notebook 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Litements 4 | 5 | Permission 
is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # litedict 2 | 3 | > Dictionary implemented on top of SQLite 4 | 5 | ## Why? 6 | 7 | You can use this to implement a persistent dictionary. It also uses some SQLite syntax to enable getting keys using pattern matching (see examples). 8 | 9 | ## Installation 10 | 11 | ``` 12 | pip install litedict 13 | ``` 14 | 15 | ## Alternatives 16 | 17 | * [RaRe-Technologies/sqlitedict](https://github.com/RaRe-Technologies/sqlitedict): This library uses a separate writing thread. Modern versions of SQLite are thread safe by default (serialized), so a separate writing thread is not strictly needed. It can be helpful to avoid DB locks, but it also adds extra complexity. 
That implementation is also missing some performance optimizations that are present in this repository. 18 | 19 | ## Examples 20 | 21 | The examples are taken from the tests in [`tests.ipynb`](./tests.ipynb) 22 | 23 | 24 | ```python 25 | from litedict import SQLDict 26 | 27 | TEST_1 = "key_test_1" 28 | TEST_2 = "key_test_2" 29 | ``` 30 | 31 | Basic functionality 32 | 33 | 34 | ```python 35 | d = SQLDict(":memory:") 36 | 37 | d[TEST_1] = "asdfoobar" 38 | 39 | assert d[TEST_1] == "asdfoobar" 40 | 41 | del d[TEST_1] 42 | 43 | assert d.get(TEST_1, None) is None 44 | 45 | # execute multiple instructions inside a transaction 46 | with d.transaction(): 47 | d["asd"] = "efg" 48 | d["foo"] = "bar" 49 | ``` 50 | 51 | Glob matching 52 | 53 | 54 | ```python 55 | d[TEST_1] = "asdfoobar" 56 | 57 | d[TEST_2] = "foobarasd" 58 | 59 | d["key_testx_3"] = "barasdfoo" 60 | 61 | assert d.glob("key_test*") == ["asdfoobar", "foobarasd", "barasdfoo"] 62 | 63 | assert d.glob("key_test_?") == ["asdfoobar", "foobarasd"] 64 | 65 | assert d.glob("key_tes[tx]*") == ["asdfoobar", "foobarasd", "barasdfoo"] 66 | ``` 67 | 68 | Numbers 69 | 70 | 71 | ```python 72 | d[TEST_1] = 1 73 | 74 | d[TEST_2] = 2 75 | 76 | assert d[TEST_1] + d[TEST_2] == 3 77 | ``` 78 | 79 | ## Benchmarks 80 | 81 | 82 | ```python 83 | from string import ascii_lowercase, printable 84 | from random import choice 85 | import random 86 | 87 | 88 | def random_string(string_length=10, fuzz=False, space=False): 89 | """Generate a random string of fixed length """ 90 | letters = ascii_lowercase 91 | letters = letters + " " if space else letters 92 | if fuzz: 93 | letters = printable 94 | return "".join(choice(letters) for i in range(string_length)) 95 | ``` 96 | 97 | 98 | ```python 99 | import gc 100 | 101 | import pickle 102 | 103 | import json 104 | ``` 105 | 106 | **Pickle** 107 | 108 | 109 | ```python 110 | d = SQLDict( 111 | ":memory:", 112 | encoder=lambda x: pickle.dumps(x).hex(), 113 | decoder=lambda x: 
pickle.loads(bytes.fromhex(x)), 114 | ) 115 | 116 | gc.collect() 117 | 118 | # %%timeit -n20000 -r10 119 | 120 | d[random_string(8)] = random_string(50) 121 | 122 | d.get(random_string(8), None) 123 | 124 | # 69.2 µs ± 4.84 µs per loop (mean ± std. dev. of 10 runs, 20000 loops each) 125 | ``` 126 | 127 | **Noop** 128 | 129 | ```python 130 | d = SQLDict( 131 | ":memory:", 132 | encoder=lambda x: x, 133 | decoder=lambda x: x, 134 | ) 135 | 136 | gc.collect() 137 | 138 | # %%timeit -n20000 -r10 139 | 140 | d[random_string(8)] = random_string(50) 141 | 142 | d.get(random_string(8), None) 143 | 144 | # 66.8 µs ± 2.41 µs per loop (mean ± std. dev. of 10 runs, 20000 loops each) 145 | ``` 146 | 147 | **JSON** 148 | 149 | ```python 150 | d = SQLDict( 151 | ":memory:", 152 | encoder=lambda x: json.dumps(x), 153 | decoder=lambda x: json.loads(x), 154 | ) 155 | 156 | gc.collect() 157 | 158 | # %%timeit -n20000 -r10 159 | 160 | d[random_string(8)] = random_string(50) 161 | 162 | d.get(random_string(8), None) 163 | 164 | # 68.6 µs ± 3.07 µs per loop (mean ± std. dev. of 10 runs, 20000 loops each) 165 | ``` 166 | 167 | **Pickle Python obj** 168 | 169 | 170 | ```python 171 | d = SQLDict( 172 | ":memory:", 173 | encoder=lambda x: pickle.dumps(x).hex(), 174 | decoder=lambda x: pickle.loads(bytes.fromhex(x)), 175 | ) 176 | 177 | gc.collect() 178 | 179 | class C: 180 | def __init__(self, x): 181 | self.x = x 182 | 183 | def pp(self): 184 | return x 185 | 186 | def f(self): 187 | def _f(y): 188 | return y * self.x ** 2 189 | 190 | return _f 191 | 192 | # %%timeit -n20000 -r10 193 | 194 | d[random_string(8)] = C(random.randint(1, 200)) 195 | 196 | d.get(random_string(8), None) 197 | 198 | # 41.1 µs ± 2.75 µs per loop (mean ± std. dev. 
of 10 runs, 20000 loops each) 199 | ``` 200 | 201 | 202 | **Dictionary** 203 | 204 | 205 | ```python 206 | d = {} 207 | 208 | gc.collect() 209 | 210 | # %%timeit -n20000 -r10 211 | 212 | d[random_string(8)] = random_string(50) 213 | 214 | d.get(random_string(8), None) 215 | 216 | # 53.1 µs ± 4.42 µs per loop (mean ± std. dev. of 10 runs, 20000 loops each) 217 | ``` 218 | 219 | ## Changelog 220 | 221 | * 0.3 222 | * Add transactions as part of the dictionary 223 | 224 | 225 | ## Meta 226 | 227 | 228 | Ricardo Ander-Egg Aguilar – [@ricardoanderegg](https://twitter.com/ricardoanderegg) – 229 | 230 | - [ricardoanderegg.com](http://ricardoanderegg.com/) 231 | - [github.com/polyrand](https://github.com/polyrand/) 232 | - [linkedin.com/in/ricardoanderegg](http://linkedin.com/in/ricardoanderegg) 233 | 234 | Distributed under the MIT license. See ``LICENSE`` for more information. 235 | 236 | ## Contributing 237 | 238 | The only hard rules for the project are: 239 | 240 | * No extra dependencies allowed 241 | * No extra files, everything must be inside the main module's `.py` file. 242 | * Tests must be inside the `tests.ipynb` notebook. 
-------------------------------------------------------------------------------- /litedict.py: -------------------------------------------------------------------------------- 1 | from collections.abc import MutableMapping 2 | import pathlib 3 | import json 4 | from typing import Callable 5 | import logging 6 | from operator import itemgetter 7 | from contextlib import contextmanager 8 | 9 | # The __init__ function and the following imports are adapted 10 | # from sqlite-utils by Simon Willison (@simonw) 11 | # written under the Apache 2 LICENSE 12 | # https://github.com/simonw/sqlite-utils/blob/main/sqlite_utils/db.py 13 | 14 | try: 15 | import pysqlite3 as sqlite3 16 | import pysqlite3.dbapi2 17 | 18 | OperationalError = pysqlite3.dbapi2.OperationalError 19 | except ImportError: 20 | import sqlite3 21 | 22 | OperationalError = sqlite3.OperationalError 23 | 24 | 25 | __version__ = "0.5" 26 | 27 | 28 | class SQLDict(MutableMapping): 29 | def __init__( 30 | self, 31 | filename_or_conn=None, 32 | memory=False, 33 | encoder: Callable = lambda x: json.dumps(x), 34 | decoder: Callable = lambda x: json.loads(x), 35 | **kwargs, 36 | ): 37 | 38 | self.conn: sqlite3.Connection 39 | 40 | assert (filename_or_conn is not None and not memory) or ( 41 | filename_or_conn is None and memory 42 | ), "Either specify a filename_or_conn or pass memory=True" 43 | if memory or filename_or_conn == ":memory:": 44 | self.conn = sqlite3.connect(":memory:", isolation_level=None, **kwargs) 45 | elif isinstance(filename_or_conn, (str, pathlib.Path)): 46 | self.conn = sqlite3.connect( 47 | str(filename_or_conn), isolation_level=None, **kwargs 48 | ) 49 | else: 50 | self.conn = filename_or_conn 51 | assert self.conn 52 | self.conn.isolation_level = None 53 | 54 | self.encoder = encoder 55 | self.decoder = decoder 56 | 57 | # store kwargs to pass them to new connections (used during backups) 58 | self._init_kwargs = kwargs 59 | 60 | with self.transaction(): 61 | # WITHOUT ROWID? 
62 | self.conn.execute( 63 | "CREATE TABLE IF NOT EXISTS Dict (key text NOT NULL PRIMARY KEY, value)" 64 | ) 65 | 66 | # if fast: 67 | self.conn.execute("PRAGMA journal_mode = 'WAL';") 68 | self.conn.execute("PRAGMA temp_store = 2;") 69 | self.conn.execute("PRAGMA synchronous = 1;") 70 | self.conn.execute(f"PRAGMA cache_size = {-1 * 64_000};") 71 | 72 | def __setitem__(self, key, value): 73 | 74 | self.conn.execute( 75 | "INSERT OR REPLACE INTO Dict VALUES (?, ?)", (key, self.encoder(value)) 76 | ) 77 | 78 | def __getitem__(self, key): 79 | c = self.conn.execute("SELECT value FROM Dict WHERE Key=?", (key,)) 80 | row = c.fetchone() 81 | if row is None: 82 | raise KeyError(key) 83 | return self.decoder(row[0]) 84 | 85 | def __delitem__(self, key): 86 | 87 | if key not in self: 88 | raise KeyError(key) 89 | 90 | self.conn.execute("DELETE FROM Dict WHERE key=?", (key,)) 91 | 92 | def __len__(self): 93 | return next(self.conn.execute("SELECT COUNT(*) FROM Dict"))[0] 94 | 95 | def __iter__(self): 96 | for row in self.conn.execute("SELECT key FROM Dict"): 97 | yield row[0] 98 | 99 | def keys(self): 100 | for row in self.conn.execute("SELECT key FROM Dict"): 101 | yield row[0] 102 | 103 | def values(self): 104 | for row in self.conn.execute("SELECT value FROM Dict"): 105 | yield self.decoder(row[0]) 106 | 107 | def items(self): 108 | c = self.conn.execute("SELECT key, value FROM Dict") 109 | for row in c: 110 | yield (row[0], self.decoder(row[1])) 111 | 112 | def __repr__(self): 113 | return f"{type(self).__name__}(Connection={self.conn!r}, items={len(self)})" 114 | 115 | def glob(self, pat: str): 116 | c = self.conn.execute("SELECT value FROM Dict WHERE Key GLOB ?", (pat,)) 117 | rows = c.fetchall() 118 | if rows is None: 119 | raise KeyError(pat) 120 | return [self.decoder(x[0]) for x in rows] 121 | 122 | # SQLite works better in autocommit mode when using short DML (INSERT / UPDATE / DELETE) statements 123 | # source: 
https://charlesleifer.com/blog/going-fast-with-sqlite-and-python/ 124 | @contextmanager 125 | def transaction(self, mode="DEFERRED"): 126 | 127 | if mode not in {"DEFERRED", "IMMEDIATE", "EXCLUSIVE"}: 128 | raise ValueError(f"Transaction mode '{mode}' is not valid") 129 | # We must issue a "BEGIN" explicitly when running in auto-commit mode. 130 | self.conn.execute(f"BEGIN {mode}") 131 | try: 132 | # Yield control back to the caller. 133 | yield 134 | except BaseException: 135 | self.conn.rollback() # Roll back all changes if an exception occurs. 136 | raise 137 | else: 138 | self.conn.commit() 139 | 140 | def to_memory(self): 141 | """ 142 | Copy to memory. 143 | 144 | This closes the current connection and substitutes 145 | it with another in-memory one. 146 | """ 147 | 148 | def progress(status, remaining, total): 149 | logging.info(f"Copied {total-remaining} of {total} pages...") 150 | 151 | dest = sqlite3.connect(":memory:", isolation_level=None, **self._init_kwargs) 152 | self.conn.backup(dest, progress=progress) 153 | self.conn.close() 154 | self.conn = dest 155 | return self 156 | 157 | def to_disk(self, new_db_or_conn): 158 | """ 159 | Copy to disk file. 160 | 161 | This closes the current connection and substitutes 162 | it with another file-based one. 
163 | """ 164 | 165 | def progress(status, remaining, total): 166 | logging.info(f"Copied {total-remaining} of {total} pages...") 167 | 168 | dest = sqlite3.connect( 169 | new_db_or_conn, isolation_level=None, **self._init_kwargs 170 | ) 171 | self.conn.backup(dest, progress=progress) 172 | self.conn = dest 173 | return self 174 | 175 | def vacuum(self): 176 | self.conn.execute("VACUUM;") 177 | 178 | def close(self): 179 | self.conn.close() 180 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | from pathlib import Path 3 | import re 4 | 5 | 6 | def get_version(fn) -> str: 7 | code = Path(fn).read_text() 8 | match = re.search(r"__version__\s*=\s*['\"]([^'\"]+)['\"]", code) 9 | assert match is not None 10 | return match.group(1) 11 | 12 | 13 | version = get_version("./litedict.py") 14 | 15 | setuptools.setup( 16 | name="litedict", 17 | version=version, 18 | author="Ricardo Ander-Egg Aguilar", 19 | author_email="rsubacc@gmail.com", 20 | description="Simple dictionary built on top of SQLite", 21 | long_description=Path("README.md").read_text(), 22 | long_description_content_type="text/markdown", 23 | url="https://github.com/litements/litedict", 24 | py_modules=["litedict"], 25 | classifiers=[ 26 | "Operating System :: OS Independent", 27 | ], 28 | python_requires=">=3.7", 29 | ) 30 | -------------------------------------------------------------------------------- /tests.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "restricted-immigration", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "%load_ext autoreload\n", 11 | "%autoreload 2" 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "id": "british-pearl", 17 | "metadata": {}, 18 | "source": [ 19 | "Download if file is not 
in folder." 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "id": "weird-depth", 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "from litedict import SQLDict" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "id": "aerial-float", 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "TEST_1 = \"key_test_1\"\n", 40 | "TEST_2 = \"key_test_2\"" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "id": "narrative-corpus", 46 | "metadata": {}, 47 | "source": [ 48 | "Basic functionality" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": null, 54 | "id": "secure-platinum", 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "d = SQLDict(\":memory:\")\n", 59 | "\n", 60 | "d[TEST_1] = \"asdfoobar\"\n", 61 | "\n", 62 | "assert d[TEST_1] == \"asdfoobar\"\n", 63 | "\n", 64 | "del d[TEST_1]\n", 65 | "\n", 66 | "assert d.get(TEST_1, None) is None" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "id": "gross-stamp", 72 | "metadata": {}, 73 | "source": [ 74 | "Glob matching" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "id": "sixth-consistency", 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "d[TEST_1] = \"asdfoobar\"\n", 85 | "\n", 86 | "d[TEST_2] = \"foobarasd\"\n", 87 | "\n", 88 | "d[\"key_testx_3\"] = \"barasdfoo\"\n", 89 | "\n", 90 | "assert d.glob(\"key_test*\") == [\"asdfoobar\", \"foobarasd\", \"barasdfoo\"]\n", 91 | "\n", 92 | "assert d.glob(\"key_test_?\") == [\"asdfoobar\", \"foobarasd\"]\n", 93 | "\n", 94 | "assert d.glob(\"key_tes[tx]*\") == [\"asdfoobar\", \"foobarasd\", \"barasdfoo\"]" 95 | ] 96 | }, 97 | { 98 | "cell_type": "markdown", 99 | "id": "novel-telling", 100 | "metadata": {}, 101 | "source": [ 102 | "Numbers" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "id": "north-equipment", 109 | "metadata": {}, 110 | "outputs": [], 
111 | "source": [ 112 | "d[TEST_1] = 1\n", 113 | "\n", 114 | "d[TEST_2] = 2\n", 115 | "\n", 116 | "assert d[TEST_1] + d[TEST_2] == 3" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "id": "minute-distinction", 123 | "metadata": {}, 124 | "outputs": [], 125 | "source": [ 126 | "with d.transaction():\n", 127 | " d[\"asd\"] = \"efg\"\n", 128 | " d[\"foo\"] = \"bar\"\n", 129 | " assert d.conn.in_transaction" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "id": "aquatic-receiver", 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "try:\n", 140 | " with d.transaction():\n", 141 | " d[\"failed\"] = \"no\"\n", 142 | "\n", 143 | " assert d.conn.in_transaction\n", 144 | "\n", 145 | " raise Exception\n", 146 | "except:\n", 147 | " # check the transaction successfully rolled back\n", 148 | " assert d.get(\"failed\") is None" 149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": null, 154 | "id": "4f5619c0", 155 | "metadata": {}, 156 | "outputs": [ 157 | { 158 | "name": "stdout", 159 | "output_type": "stream", 160 | "text": [ 161 | "key_testx_3 \"barasdfoo\"\n", 162 | "key_test_1 1\n", 163 | "key_test_2 2\n", 164 | "asd \"efg\"\n", 165 | "foo \"bar\"\n" 166 | ] 167 | } 168 | ], 169 | "source": [ 170 | "for k, v in d.items():\n", 171 | " print(k, v)" 172 | ] 173 | }, 174 | { 175 | "cell_type": "markdown", 176 | "id": "991c7fb1", 177 | "metadata": {}, 178 | "source": [ 179 | "Test moving from/to disk/memory."
180 | ] 181 | }, 182 | { 183 | "cell_type": "code", 184 | "execution_count": null, 185 | "id": "95df7552", 186 | "metadata": {}, 187 | "outputs": [], 188 | "source": [ 189 | "import os\n", 190 | "import pickle\n", 191 | "\n", 192 | "fname = \"test_disk.db\"\n", 193 | "\n", 194 | "d = SQLDict(\n", 195 | " \":memory:\",\n", 196 | " encoder=lambda x: pickle.dumps(x).hex(),\n", 197 | " decoder=lambda x: pickle.loads(bytes.fromhex(x)),\n", 198 | ")\n", 199 | "\n", 200 | "d[\"foo\"] = \"bar\"\n", 201 | "d[\"asd\"] = 2\n", 202 | "\n", 203 | "d.to_disk(\"test_disk.db\")\n", 204 | "\n", 205 | "assert fname in os.listdir()\n", 206 | "assert \"foo\" in d\n", 207 | "assert \"asd\" in d\n", 208 | "\n", 209 | "d.close()\n", 210 | "del d\n", 211 | "\n", 212 | "\n", 213 | "d = SQLDict(\n", 214 | " fname,\n", 215 | " encoder=lambda x: pickle.dumps(x).hex(),\n", 216 | " decoder=lambda x: pickle.loads(bytes.fromhex(x)),\n", 217 | ")\n", 218 | "\n", 219 | "assert d[\"foo\"] == \"bar\"\n", 220 | "assert d[\"asd\"] == 2\n", 221 | "\n", 222 | "d.to_memory()\n", 223 | "\n", 224 | "os.unlink(fname)\n", 225 | "\n", 226 | "assert d[\"foo\"] == \"bar\"\n", 227 | "assert d[\"asd\"] == 2" 228 | ] 229 | }, 230 | { 231 | "cell_type": "markdown", 232 | "id": "authentic-colon", 233 | "metadata": {}, 234 | "source": [ 235 | "## Benchmarks" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": null, 241 | "id": "optional-settle", 242 | "metadata": {}, 243 | "outputs": [], 244 | "source": [ 245 | "from string import ascii_lowercase, printable\n", 246 | "from random import choice\n", 247 | "import random\n", 248 | "\n", 249 | "\n", 250 | "def random_string(string_length=10, fuzz=False, space=False):\n", 251 | " \"\"\"Generate a random string of fixed length \"\"\"\n", 252 | " letters = ascii_lowercase\n", 253 | " letters = letters + \" \" if space else letters\n", 254 | " if fuzz:\n", 255 | " letters = printable\n", 256 | " return \"\".join(choice(letters) for i in 
range(string_length))" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": null, 262 | "id": "devoted-cruise", 263 | "metadata": {}, 264 | "outputs": [], 265 | "source": [ 266 | "import gc\n", 267 | "\n", 268 | "import pickle\n", 269 | "\n", 270 | "import json" 271 | ] 272 | }, 273 | { 274 | "cell_type": "markdown", 275 | "id": "needed-scope", 276 | "metadata": {}, 277 | "source": [ 278 | "**Pickle**" 279 | ] 280 | }, 281 | { 282 | "cell_type": "code", 283 | "execution_count": null, 284 | "id": "sacred-track", 285 | "metadata": {}, 286 | "outputs": [], 287 | "source": [ 288 | "d = SQLDict(\n", 289 | " \":memory:\",\n", 290 | " encoder=lambda x: pickle.dumps(x).hex(),\n", 291 | " decoder=lambda x: pickle.loads(bytes.fromhex(x)),\n", 292 | ")" 293 | ] 294 | }, 295 | { 296 | "cell_type": "code", 297 | "execution_count": null, 298 | "id": "dense-ozone", 299 | "metadata": {}, 300 | "outputs": [ 301 | { 302 | "data": { 303 | "text/plain": [ 304 | "152" 305 | ] 306 | }, 307 | "execution_count": null, 308 | "metadata": {}, 309 | "output_type": "execute_result" 310 | } 311 | ], 312 | "source": [ 313 | "gc.collect()" 314 | ] 315 | }, 316 | { 317 | "cell_type": "code", 318 | "execution_count": null, 319 | "id": "sacred-tennis", 320 | "metadata": {}, 321 | "outputs": [ 322 | { 323 | "name": "stdout", 324 | "output_type": "stream", 325 | "text": [ 326 | "36.8 µs ± 928 ns per loop (mean ± std. dev. 
of 10 runs, 20,000 loops each)\n" 327 | ] 328 | } 329 | ], 330 | "source": [ 331 | "%%timeit -n20000 -r10\n", 332 | "\n", 333 | "d[random_string(8)] = random_string(50)\n", 334 | "\n", 335 | "d.get(random_string(8), None)" 336 | ] 337 | }, 338 | { 339 | "cell_type": "markdown", 340 | "id": "correct-smith", 341 | "metadata": {}, 342 | "source": [ 343 | "**Noop**" 344 | ] 345 | }, 346 | { 347 | "cell_type": "code", 348 | "execution_count": null, 349 | "id": "imported-university", 350 | "metadata": {}, 351 | "outputs": [], 352 | "source": [ 353 | "d = SQLDict(\n", 354 | " \":memory:\",\n", 355 | " encoder=lambda x: x,\n", 356 | " decoder=lambda x: x,\n", 357 | ")" 358 | ] 359 | }, 360 | { 361 | "cell_type": "code", 362 | "execution_count": null, 363 | "id": "tutorial-trauma", 364 | "metadata": {}, 365 | "outputs": [ 366 | { 367 | "data": { 368 | "text/plain": [ 369 | "3" 370 | ] 371 | }, 372 | "execution_count": null, 373 | "metadata": {}, 374 | "output_type": "execute_result" 375 | } 376 | ], 377 | "source": [ 378 | "gc.collect()" 379 | ] 380 | }, 381 | { 382 | "cell_type": "code", 383 | "execution_count": null, 384 | "id": "bulgarian-colombia", 385 | "metadata": {}, 386 | "outputs": [ 387 | { 388 | "name": "stdout", 389 | "output_type": "stream", 390 | "text": [ 391 | "35 µs ± 941 ns per loop (mean ± std. dev. 
of 10 runs, 20,000 loops each)\n" 392 | ] 393 | } 394 | ], 395 | "source": [ 396 | "%%timeit -n20000 -r10\n", 397 | "\n", 398 | "d[random_string(8)] = random_string(50)\n", 399 | "\n", 400 | "d.get(random_string(8), None)" 401 | ] 402 | }, 403 | { 404 | "cell_type": "markdown", 405 | "id": "hollow-briefs", 406 | "metadata": {}, 407 | "source": [ 408 | "**JSON**" 409 | ] 410 | }, 411 | { 412 | "cell_type": "code", 413 | "execution_count": null, 414 | "id": "frank-stable", 415 | "metadata": {}, 416 | "outputs": [], 417 | "source": [ 418 | "d = SQLDict(\n", 419 | " \":memory:\",\n", 420 | " encoder=lambda x: json.dumps(x),\n", 421 | " decoder=lambda x: json.loads(x),\n", 422 | ")" 423 | ] 424 | }, 425 | { 426 | "cell_type": "code", 427 | "execution_count": null, 428 | "id": "wooden-enlargement", 429 | "metadata": {}, 430 | "outputs": [ 431 | { 432 | "data": { 433 | "text/plain": [ 434 | "3" 435 | ] 436 | }, 437 | "execution_count": null, 438 | "metadata": {}, 439 | "output_type": "execute_result" 440 | } 441 | ], 442 | "source": [ 443 | "gc.collect()" 444 | ] 445 | }, 446 | { 447 | "cell_type": "code", 448 | "execution_count": null, 449 | "id": "after-essex", 450 | "metadata": {}, 451 | "outputs": [ 452 | { 453 | "name": "stdout", 454 | "output_type": "stream", 455 | "text": [ 456 | "35.9 µs ± 707 ns per loop (mean ± std. dev. 
of 10 runs, 20,000 loops each)\n" 457 | ] 458 | } 459 | ], 460 | "source": [ 461 | "%%timeit -n20000 -r10\n", 462 | "\n", 463 | "d[random_string(8)] = random_string(50)\n", 464 | "\n", 465 | "d.get(random_string(8), None)" 466 | ] 467 | }, 468 | { 469 | "cell_type": "markdown", 470 | "id": "standard-asian", 471 | "metadata": {}, 472 | "source": [ 473 | "**Pickle Python obj**" 474 | ] 475 | }, 476 | { 477 | "cell_type": "code", 478 | "execution_count": null, 479 | "id": "dressed-investor", 480 | "metadata": {}, 481 | "outputs": [], 482 | "source": [ 483 | "d = SQLDict(\n", 484 | " \":memory:\",\n", 485 | " encoder=lambda x: pickle.dumps(x).hex(),\n", 486 | " decoder=lambda x: pickle.loads(bytes.fromhex(x)),\n", 487 | ")" 488 | ] 489 | }, 490 | { 491 | "cell_type": "code", 492 | "execution_count": null, 493 | "id": "sustained-aviation", 494 | "metadata": {}, 495 | "outputs": [ 496 | { 497 | "data": { 498 | "text/plain": [ 499 | "3" 500 | ] 501 | }, 502 | "execution_count": null, 503 | "metadata": {}, 504 | "output_type": "execute_result" 505 | } 506 | ], 507 | "source": [ 508 | "gc.collect()" 509 | ] 510 | }, 511 | { 512 | "cell_type": "code", 513 | "execution_count": null, 514 | "id": "technological-filling", 515 | "metadata": {}, 516 | "outputs": [], 517 | "source": [ 518 | "class C:\n", 519 | " def __init__(self, x):\n", 520 | " self.x = x\n", 521 | "\n", 522 | " def pp(self):\n", 523 | " return x\n", 524 | "\n", 525 | " def f(self):\n", 526 | " def _f(y):\n", 527 | " return y * self.x ** 2\n", 528 | "\n", 529 | " return _f" 530 | ] 531 | }, 532 | { 533 | "cell_type": "code", 534 | "execution_count": null, 535 | "id": "direct-accent", 536 | "metadata": {}, 537 | "outputs": [ 538 | { 539 | "name": "stdout", 540 | "output_type": "stream", 541 | "text": [ 542 | "20.8 µs ± 574 ns per loop (mean ± std. dev. 
of 10 runs, 20,000 loops each)\n" 543 | ] 544 | } 545 | ], 546 | "source": [ 547 | "%%timeit -n20000 -r10\n", 548 | "\n", 549 | "d[random_string(8)] = C(random.randint(1, 200))\n", 550 | "\n", 551 | "d.get(random_string(8), None)" 552 | ] 553 | }, 554 | { 555 | "cell_type": "markdown", 556 | "id": "compliant-colony", 557 | "metadata": {}, 558 | "source": [ 559 | "**Dictionary**" 560 | ] 561 | }, 562 | { 563 | "cell_type": "code", 564 | "execution_count": null, 565 | "id": "ready-palestine", 566 | "metadata": {}, 567 | "outputs": [], 568 | "source": [ 569 | "d = {}" 570 | ] 571 | }, 572 | { 573 | "cell_type": "code", 574 | "execution_count": null, 575 | "id": "beginning-survey", 576 | "metadata": {}, 577 | "outputs": [ 578 | { 579 | "data": { 580 | "text/plain": [ 581 | "3" 582 | ] 583 | }, 584 | "execution_count": null, 585 | "metadata": {}, 586 | "output_type": "execute_result" 587 | } 588 | ], 589 | "source": [ 590 | "gc.collect()" 591 | ] 592 | }, 593 | { 594 | "cell_type": "code", 595 | "execution_count": null, 596 | "id": "civic-preservation", 597 | "metadata": {}, 598 | "outputs": [ 599 | { 600 | "name": "stdout", 601 | "output_type": "stream", 602 | "text": [ 603 | "26 µs ± 574 ns per loop (mean ± std. dev. of 10 runs, 20,000 loops each)\n" 604 | ] 605 | } 606 | ], 607 | "source": [ 608 | "%%timeit -n20000 -r10\n", 609 | "\n", 610 | "d[random_string(8)] = random_string(50)\n", 611 | "\n", 612 | "d.get(random_string(8), None)" 613 | ] 614 | } 615 | ], 616 | "metadata": {}, 617 | "nbformat": 4, 618 | "nbformat_minor": 5 619 | } 620 | --------------------------------------------------------------------------------