├── nbs ├── CNAME ├── images │ └── diagram.png ├── nbdev.yml ├── llms.txt ├── styles.css ├── _quarto.yml ├── test_update.ipynb ├── test_upsert.ipynb └── test_insert.ipynb ├── requirements.txt ├── fastlite ├── __init__.py ├── _modidx.py ├── core.py └── kw.py ├── MANIFEST.in ├── index_files └── figure-commonmark │ ├── cell-15-1-image.png │ ├── cell-16-1-image.png │ ├── cell-20-1-image.png │ ├── cell-21-1-image.png │ ├── cell-28-output-1.svg │ ├── cell-32-output-1.svg │ ├── cell-46-output-1.svg │ ├── cell-50-output-1.svg │ ├── cell-27-output-1.svg │ └── cell-31-output-1.svg ├── .github └── workflows │ ├── test.yaml.off │ └── deploy.yaml ├── .devcontainer ├── noop.txt ├── Dockerfile └── devcontainer.json ├── pyproject.toml ├── settings.ini ├── .gitignore ├── setup.py ├── CHANGELOG.md ├── LICENSE └── README.md /nbs/CNAME: -------------------------------------------------------------------------------- 1 | fastlite.answer.ai 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | fastcore>=1.5.41 2 | -------------------------------------------------------------------------------- /fastlite/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.2.4" 2 | from .core import * 3 | from .kw import * 4 | -------------------------------------------------------------------------------- /nbs/images/diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AnswerDotAI/fastlite/HEAD/nbs/images/diagram.png -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include settings.ini 2 | include LICENSE 3 | include CONTRIBUTING.md 4 | include README.md 5 | recursive-exclude * __pycache__ 6 | 
-------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-15-1-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AnswerDotAI/fastlite/HEAD/index_files/figure-commonmark/cell-15-1-image.png -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-16-1-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AnswerDotAI/fastlite/HEAD/index_files/figure-commonmark/cell-16-1-image.png -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-20-1-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AnswerDotAI/fastlite/HEAD/index_files/figure-commonmark/cell-20-1-image.png -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-21-1-image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AnswerDotAI/fastlite/HEAD/index_files/figure-commonmark/cell-21-1-image.png -------------------------------------------------------------------------------- /.github/workflows/test.yaml.off: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [workflow_dispatch, pull_request, push] 3 | 4 | jobs: 5 | test: 6 | runs-on: ubuntu-latest 7 | steps: [uses: fastai/workflows/nbdev-ci@master] 8 | -------------------------------------------------------------------------------- /.devcontainer/noop.txt: -------------------------------------------------------------------------------- 1 | This file is copied into the container along with environment.yml* from the 2 | parent folder. 
This is done to prevent the Dockerfile COPY instruction from 3 | failing if no environment.yml is found. -------------------------------------------------------------------------------- /nbs/nbdev.yml: -------------------------------------------------------------------------------- 1 | project: 2 | output-dir: _docs 3 | 4 | website: 5 | title: "fastlite" 6 | site-url: "https://AnswerDotAI.github.io/fastlite" 7 | description: "A bit of extra usability for sqlite" 8 | repo-branch: main 9 | repo-url: "https://github.com/AnswerDotAI/fastlite" 10 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy to GitHub Pages 2 | 3 | permissions: 4 | contents: write 5 | pages: write 6 | 7 | on: 8 | push: 9 | branches: [ "main", "master" ] 10 | workflow_dispatch: 11 | jobs: 12 | deploy: 13 | runs-on: ubuntu-latest 14 | steps: [uses: fastai/workflows/quarto-ghp@master] 15 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=64.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name="fastlite" 7 | requires-python=">=3.10" 8 | dynamic = [ "keywords", "description", "version", "dependencies", "optional-dependencies", "readme", "license", "authors", "classifiers", "entry-points", "scripts", "urls"] 9 | 10 | [tool.uv] 11 | cache-keys = [{ file = "pyproject.toml" }, { file = "settings.ini" }, { file = "setup.py" }] 12 | -------------------------------------------------------------------------------- /nbs/llms.txt: -------------------------------------------------------------------------------- 1 | # Fastlite and sqlite-utils 2 | 3 | > Fastlite builds on top of apswutils and sqlite-utils; generally you will want to use a combination of these 
4 | 5 | ## Docs 6 | 7 | - [Fastlite docs](https://answerdotai.github.io/fastlite/index.html.md): Fastlite docs home page 8 | - [apswutils docs](https://answerdotai.github.io/apswutils/): apswutils docs 9 | - [sqlite-utils docs](https://sqlite-utils.datasette.io/en/stable/_sources/python-api.rst.txt): Full sqlite-utils documentation 10 | 11 | ## Optional 12 | 13 | - [APSW tour](https://rogerbinns.github.io/apsw/_sources/example.rst.txt) 14 | -------------------------------------------------------------------------------- /nbs/styles.css: -------------------------------------------------------------------------------- 1 | .cell { 2 | margin-bottom: 1rem; 3 | } 4 | 5 | .cell > .sourceCode { 6 | margin-bottom: 0; 7 | } 8 | 9 | .cell-output > pre { 10 | margin-bottom: 0; 11 | } 12 | 13 | .cell-output > pre, .cell-output > .sourceCode > pre, .cell-output-stdout > pre { 14 | margin-left: 0.8rem; 15 | margin-top: 0; 16 | background: none; 17 | border-left: 2px solid lightsalmon; 18 | border-top-left-radius: 0; 19 | border-top-right-radius: 0; 20 | } 21 | 22 | .cell-output > .sourceCode { 23 | border: none; 24 | } 25 | 26 | .cell-output > .sourceCode { 27 | background: none; 28 | margin-top: 0; 29 | } 30 | 31 | div.description { 32 | padding-left: 2px; 33 | padding-top: 5px; 34 | font-style: italic; 35 | font-size: 135%; 36 | opacity: 70%; 37 | } 38 | 39 | .retina { 40 | transform: scale(0.5); 41 | transform-origin: top left; 42 | } 43 | 44 | -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/devcontainers/miniconda:0-3 2 | 3 | # Copy environment.yml (if found) to a temp location so we update the environment. Also 4 | # copy "noop.txt" so the COPY instruction does not fail if no environment.yml exists. 
5 | COPY environment.yml* .devcontainer/noop.txt /tmp/conda-tmp/ 6 | RUN if [ -f "/tmp/conda-tmp/environment.yml" ]; then umask 0002 && /opt/conda/bin/conda env update -n base -f /tmp/conda-tmp/environment.yml; fi \ 7 | && rm -rf /tmp/conda-tmp 8 | 9 | # [Optional] Uncomment to install a different version of Python than the default 10 | # RUN conda install -y python=3.6 \ 11 | # && pip install --no-cache-dir pipx \ 12 | # && pipx reinstall-all 13 | 14 | # [Optional] Uncomment this section to install additional OS packages. 15 | # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 16 | # && apt-get -y install --no-install-recommends 17 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the 2 | // README at: https://github.com/devcontainers/templates/tree/main/src/miniconda 3 | { 4 | "name": "Miniconda (Python 3)", 5 | "build": { 6 | "context": "..", 7 | "dockerfile": "Dockerfile" 8 | }, 9 | 10 | // Features to add to the dev container. More info: https://containers.dev/features. 11 | // "features": {}, 12 | 13 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 14 | // "forwardPorts": [], 15 | 16 | // Use 'postCreateCommand' to run commands after the container is created. 17 | "postCreateCommand": "pip3 install --user -r requirements.txt" 18 | 19 | // Configure tool-specific properties. 20 | // "customizations": {}, 21 | 22 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
23 | // "remoteUser": "root" 24 | } 25 | -------------------------------------------------------------------------------- /settings.ini: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | repo = fastlite 3 | lib_name = fastlite 4 | version = 0.2.4 5 | min_python = 3.10 6 | license = apache2 7 | black_formatting = False 8 | requirements = fastcore>=1.7.1 apswutils>=0.1.2 9 | dev_requirements = graphviz 10 | conda_user = fastai 11 | doc_path = _docs 12 | lib_path = fastlite 13 | nbs_path = nbs 14 | recursive = True 15 | tst_flags = notest 16 | put_version_in_init = True 17 | branch = main 18 | custom_sidebar = False 19 | doc_host = https://AnswerDotAI.github.io 20 | doc_baseurl = /fastlite 21 | git_url = https://github.com/AnswerDotAI/fastlite 22 | title = fastlite 23 | audience = Developers 24 | author = Jeremy Howard 25 | author_email = info@fast.ai 26 | copyright = 2024 onwards, Jeremy Howard 27 | description = A bit of extra usability for sqlite 28 | keywords = nbdev jupyter notebook python sqlite sql 29 | language = English 30 | status = 3 31 | user = AnswerDotAI 32 | readme_nb = index.ipynb 33 | allowed_metadata_keys = 34 | allowed_cell_metadata_keys = 35 | jupyter_hooks = False 36 | clean_ids = True 37 | clear_all = False 38 | cell_number = False 39 | skip_procs = 40 | update_pyproject = True 41 | 42 | -------------------------------------------------------------------------------- /nbs/_quarto.yml: -------------------------------------------------------------------------------- 1 | project: 2 | type: website 3 | resources: 4 | - "*.png" 5 | - "*.jpg" 6 | - "*.txt" 7 | preview: 8 | navigate: false 9 | port: 3000 10 | 11 | format: 12 | html: 13 | theme: cosmo 14 | css: styles.css 15 | toc: true 16 | code-tools: true 17 | code-block-bg: true 18 | code-block-border-left: "#31BAE9" 19 | highlight-style: arrow 20 | grid: 21 | sidebar-width: 180px 22 | body-width: 1800px 23 | margin-width: 150px 24 | gutter-width: 
1.0rem 25 | keep-md: true 26 | commonmark: default 27 | 28 | website: 29 | site-url: "https://answerdotai.github.io/fastlite/" 30 | twitter-card: 31 | card-style: summary 32 | creator: "@jeremyphoward" 33 | site: "@jeremyphoward" 34 | open-graph: true 35 | repo-url: https://github.com/AnswerDotAI/fastlite 36 | repo-subdir: nbs 37 | repo-actions: [edit, issue, source] 38 | navbar: 39 | background: primary 40 | search: true 41 | right: 42 | - icon: github 43 | href: https://github.com/AnswerDotAI/fastlite 44 | aria-label: GitHub 45 | sidebar: 46 | style: floating 47 | 48 | metadata-files: [nbdev.yml, sidebar.yml] 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | _proc/ 2 | db_dc.py 3 | *-client_wal_index 4 | *-shm 5 | *-wal 6 | *.sqlite 7 | *.db 8 | sidebar.yml 9 | Gemfile.lock 10 | token 11 | _docs/ 12 | conda/ 13 | .last_checked 14 | .gitconfig 15 | *.bak 16 | *.log 17 | *~ 18 | ~* 19 | _tmp* 20 | tmp* 21 | tags 22 | 23 | # Byte-compiled / optimized / DLL files 24 | __pycache__/ 25 | *.py[cod] 26 | *$py.class 27 | 28 | # C extensions 29 | *.so 30 | 31 | # Distribution / packaging 32 | .Python 33 | env/ 34 | build/ 35 | develop-eggs/ 36 | dist/ 37 | downloads/ 38 | eggs/ 39 | .eggs/ 40 | lib/ 41 | lib64/ 42 | parts/ 43 | sdist/ 44 | var/ 45 | wheels/ 46 | *.egg-info/ 47 | .installed.cfg 48 | *.egg 49 | 50 | # PyInstaller 51 | # Usually these files are written by a python script from a template 52 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
53 | *.manifest 54 | *.spec 55 | 56 | # Installer logs 57 | pip-log.txt 58 | pip-delete-this-directory.txt 59 | 60 | # Unit test / coverage reports 61 | htmlcov/ 62 | .tox/ 63 | .coverage 64 | .coverage.* 65 | .cache 66 | nosetests.xml 67 | coverage.xml 68 | *.cover 69 | .hypothesis/ 70 | 71 | # Translations 72 | *.mo 73 | *.pot 74 | 75 | # Django stuff: 76 | *.log 77 | local_settings.py 78 | 79 | # Flask stuff: 80 | instance/ 81 | .webassets-cache 82 | 83 | # Scrapy stuff: 84 | .scrapy 85 | 86 | # Sphinx documentation 87 | docs/_build/ 88 | 89 | # PyBuilder 90 | target/ 91 | 92 | # Jupyter Notebook 93 | .ipynb_checkpoints 94 | 95 | # pyenv 96 | .python-version 97 | 98 | # celery beat schedule file 99 | celerybeat-schedule 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # dotenv 105 | .env 106 | 107 | # virtualenv 108 | .venv 109 | venv/ 110 | ENV/ 111 | 112 | # Spyder project settings 113 | .spyderproject 114 | .spyproject 115 | 116 | # Rope project settings 117 | .ropeproject 118 | 119 | # mkdocs documentation 120 | /site 121 | 122 | # mypy 123 | .mypy_cache/ 124 | 125 | .vscode 126 | *.swp 127 | 128 | # osx generated files 129 | .DS_Store 130 | .DS_Store? 
131 | .Trashes 132 | ehthumbs.db 133 | Thumbs.db 134 | .idea 135 | 136 | # pytest 137 | .pytest_cache 138 | 139 | # tools/trust-doc-nbs 140 | docs_src/.last_checked 141 | 142 | # symlinks to fastai 143 | docs_src/fastai 144 | tools/fastai 145 | 146 | # link checker 147 | checklink/cookies.txt 148 | 149 | # .gitconfig is now autogenerated 150 | .gitconfig 151 | 152 | _docs 153 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from pkg_resources import parse_version 2 | from configparser import ConfigParser 3 | import setuptools, shlex 4 | assert parse_version(setuptools.__version__)>=parse_version('36.2') 5 | 6 | # note: all settings are in settings.ini; edit there, not here 7 | config = ConfigParser(delimiters=['=']) 8 | config.read('settings.ini', encoding='utf-8') 9 | cfg = config['DEFAULT'] 10 | 11 | cfg_keys = 'version description keywords author author_email'.split() 12 | expected = cfg_keys + "lib_name user branch license status min_python audience language".split() 13 | for o in expected: assert o in cfg, "missing expected setting: {}".format(o) 14 | setup_cfg = {o:cfg[o] for o in cfg_keys} 15 | 16 | licenses = { 17 | 'apache2': ('Apache Software License 2.0','OSI Approved :: Apache Software License'), 18 | 'mit': ('MIT License', 'OSI Approved :: MIT License'), 19 | 'gpl2': ('GNU General Public License v2', 'OSI Approved :: GNU General Public License v2 (GPLv2)'), 20 | 'gpl3': ('GNU General Public License v3', 'OSI Approved :: GNU General Public License v3 (GPLv3)'), 21 | 'bsd3': ('BSD License', 'OSI Approved :: BSD License'), 22 | } 23 | statuses = [ '1 - Planning', '2 - Pre-Alpha', '3 - Alpha', 24 | '4 - Beta', '5 - Production/Stable', '6 - Mature', '7 - Inactive' ] 25 | py_versions = '3.6 3.7 3.8 3.9 3.10 3.11 3.12'.split() 26 | 27 | requirements = shlex.split(cfg.get('requirements', '')) 28 | if cfg.get('pip_requirements'): 
requirements += shlex.split(cfg.get('pip_requirements', '')) 29 | min_python = cfg['min_python'] 30 | lic = licenses.get(cfg['license'].lower(), (cfg['license'], None)) 31 | dev_requirements = (cfg.get('dev_requirements') or '').split() 32 | 33 | setuptools.setup( 34 | name = cfg['lib_name'], 35 | license = lic[0], 36 | classifiers = [ 37 | 'Development Status :: ' + statuses[int(cfg['status'])], 38 | 'Intended Audience :: ' + cfg['audience'].title(), 39 | 'Natural Language :: ' + cfg['language'].title(), 40 | ] + ['Programming Language :: Python :: '+o for o in py_versions[py_versions.index(min_python):]] + (['License :: ' + lic[1] ] if lic[1] else []), 41 | url = cfg['git_url'], 42 | packages = setuptools.find_packages(), 43 | include_package_data = True, 44 | install_requires = requirements, 45 | extras_require={ 'dev': dev_requirements }, 46 | dependency_links = cfg.get('dep_links','').split(), 47 | python_requires = '>=' + cfg['min_python'], 48 | long_description = open('README.md', encoding='utf-8').read(), 49 | long_description_content_type = 'text/markdown', 50 | zip_safe = False, 51 | entry_points = { 52 | 'console_scripts': cfg.get('console_scripts','').split(), 53 | 'nbdev': [f'{cfg.get("lib_path")}={cfg.get("lib_path")}._modidx:d'] 54 | }, 55 | **setup_cfg) 56 | 57 | 58 | -------------------------------------------------------------------------------- /fastlite/_modidx.py: -------------------------------------------------------------------------------- 1 | # Autogenerated by nbdev 2 | 3 | d = { 'settings': { 'branch': 'main', 4 | 'doc_baseurl': '/fastlite', 5 | 'doc_host': 'https://AnswerDotAI.github.io', 6 | 'git_url': 'https://github.com/AnswerDotAI/fastlite', 7 | 'lib_path': 'fastlite'}, 8 | 'syms': { 'fastlite.core': { 'fastlite.core.Database.create': ('core.html#database.create', 'fastlite/core.py'), 9 | 'fastlite.core.Database.get_tables': ('core.html#database.get_tables', 'fastlite/core.py'), 10 | 'fastlite.core.Database.import_file': 
('core.html#database.import_file', 'fastlite/core.py'), 11 | 'fastlite.core.Database.link_dcs': ('core.html#database.link_dcs', 'fastlite/core.py'), 12 | 'fastlite.core.Database.q': ('core.html#database.q', 'fastlite/core.py'), 13 | 'fastlite.core.Database.set_classes': ('core.html#database.set_classes', 'fastlite/core.py'), 14 | 'fastlite.core.Database.t': ('core.html#database.t', 'fastlite/core.py'), 15 | 'fastlite.core.Database.v': ('core.html#database.v', 'fastlite/core.py'), 16 | 'fastlite.core.Table.__call__': ('core.html#table.__call__', 'fastlite/core.py'), 17 | 'fastlite.core.Table.__str__': ('core.html#table.__str__', 'fastlite/core.py'), 18 | 'fastlite.core.Table.c': ('core.html#table.c', 'fastlite/core.py'), 19 | 'fastlite.core.Table.selectone': ('core.html#table.selectone', 'fastlite/core.py'), 20 | 'fastlite.core.View.__call__': ('core.html#view.__call__', 'fastlite/core.py'), 21 | 'fastlite.core.View.__str__': ('core.html#view.__str__', 'fastlite/core.py'), 22 | 'fastlite.core.View.c': ('core.html#view.c', 'fastlite/core.py'), 23 | 'fastlite.core.View.selectone': ('core.html#view.selectone', 'fastlite/core.py'), 24 | 'fastlite.core._Col': ('core.html#_col', 'fastlite/core.py'), 25 | 'fastlite.core._Col.__init__': ('core.html#_col.__init__', 'fastlite/core.py'), 26 | 'fastlite.core._Col.__iter__': ('core.html#_col.__iter__', 'fastlite/core.py'), 27 | 'fastlite.core._Col.__repr__': ('core.html#_col.__repr__', 'fastlite/core.py'), 28 | 'fastlite.core._Col.__str__': ('core.html#_col.__str__', 'fastlite/core.py'), 29 | 'fastlite.core._ColsGetter': ('core.html#_colsgetter', 'fastlite/core.py'), 30 | 'fastlite.core._ColsGetter.__call__': ('core.html#_colsgetter.__call__', 'fastlite/core.py'), 31 | 'fastlite.core._ColsGetter.__contains__': ('core.html#_colsgetter.__contains__', 'fastlite/core.py'), 32 | 'fastlite.core._ColsGetter.__dir__': ('core.html#_colsgetter.__dir__', 'fastlite/core.py'), 33 | 'fastlite.core._ColsGetter.__getattr__': 
('core.html#_colsgetter.__getattr__', 'fastlite/core.py'), 34 | 'fastlite.core._ColsGetter.__init__': ('core.html#_colsgetter.__init__', 'fastlite/core.py'), 35 | 'fastlite.core._ColsGetter.__repr__': ('core.html#_colsgetter.__repr__', 'fastlite/core.py'), 36 | 'fastlite.core._Getter': ('core.html#_getter', 'fastlite/core.py'), 37 | 'fastlite.core._Getter.__contains__': ('core.html#_getter.__contains__', 'fastlite/core.py'), 38 | 'fastlite.core._Getter.__getattr__': ('core.html#_getter.__getattr__', 'fastlite/core.py'), 39 | 'fastlite.core._Getter.__getitem__': ('core.html#_getter.__getitem__', 'fastlite/core.py'), 40 | 'fastlite.core._Getter.__init__': ('core.html#_getter.__init__', 'fastlite/core.py'), 41 | 'fastlite.core._Getter.__iter__': ('core.html#_getter.__iter__', 'fastlite/core.py'), 42 | 'fastlite.core._Getter.__repr__': ('core.html#_getter.__repr__', 'fastlite/core.py'), 43 | 'fastlite.core._TablesGetter': ('core.html#_tablesgetter', 'fastlite/core.py'), 44 | 'fastlite.core._TablesGetter.__dir__': ('core.html#_tablesgetter.__dir__', 'fastlite/core.py'), 45 | 'fastlite.core._ViewsGetter': ('core.html#_viewsgetter', 'fastlite/core.py'), 46 | 'fastlite.core._ViewsGetter.__dir__': ('core.html#_viewsgetter.__dir__', 'fastlite/core.py'), 47 | 'fastlite.core._dataclass': ('core.html#_dataclass', 'fastlite/core.py'), 48 | 'fastlite.core._edge': ('core.html#_edge', 'fastlite/core.py'), 49 | 'fastlite.core._enum_types': ('core.html#_enum_types', 'fastlite/core.py'), 50 | 'fastlite.core._get_flds': ('core.html#_get_flds', 'fastlite/core.py'), 51 | 'fastlite.core._is_enum': ('core.html#_is_enum', 'fastlite/core.py'), 52 | 'fastlite.core._parse_typ': ('core.html#_parse_typ', 'fastlite/core.py'), 53 | 'fastlite.core._row': ('core.html#_row', 'fastlite/core.py'), 54 | 'fastlite.core._tnode': ('core.html#_tnode', 'fastlite/core.py'), 55 | 'fastlite.core.all_dcs': ('core.html#all_dcs', 'fastlite/core.py'), 56 | 'fastlite.core.create_mod': ('core.html#create_mod', 
'fastlite/core.py'), 57 | 'fastlite.core.diagram': ('core.html#diagram', 'fastlite/core.py'), 58 | 'fastlite.core.get_typ': ('core.html#get_typ', 'fastlite/core.py')}, 59 | 'fastlite.kw': {}}} 60 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Release notes 2 | 3 | 4 | 5 | ## 0.2.3 6 | 7 | ### Bugs Squashed 8 | 9 | - Use UNSET for default values ([#75](https://github.com/AnswerDotAI/fastlite/issues/75)) 10 | 11 | 12 | ## 0.2.2 13 | 14 | ### New Features 15 | 16 | - Add `__all__` to fastlite.kw ([#74](https://github.com/AnswerDotAI/fastlite/issues/74)) 17 | - Allow `flags` to modify SQLite connection ([#73](https://github.com/AnswerDotAI/fastlite/pull/73)), thanks to [@gautam-e](https://github.com/gautam-e) 18 | 19 | 20 | ## 0.2.1 21 | 22 | ### New Features 23 | 24 | - Auto handle UNSET values in update kwargs ([#71](https://github.com/AnswerDotAI/fastlite/issues/71)) 25 | 26 | 27 | ## 0.2.0 28 | 29 | ### Breaking Changes 30 | 31 | - `fetchone` renamed to `selectone`, and now raises exception for non-unique return 32 | 33 | ### New Features 34 | 35 | - Check for multi row return in `selectone` ([#70](https://github.com/AnswerDotAI/fastlite/issues/70)) 36 | 37 | 38 | ## 0.1.4 39 | 40 | 41 | ### Bugs Squashed 42 | 43 | - upsert method doesn't support composite primary keys ([#69](https://github.com/AnswerDotAI/fastlite/issues/69)) 44 | 45 | 46 | ## 0.1.3 47 | 48 | ### New Features 49 | 50 | - Add `link_dcs` and have `create_mod` output `__all__` ([#64](https://github.com/AnswerDotAI/fastlite/issues/64)) 51 | 52 | 53 | ## 0.1.2 54 | 55 | ### New Features 56 | 57 | - Add optional default value for `get` ([#55](https://github.com/AnswerDotAI/fastlite/pull/55)), thanks to [@tom-pollak](https://github.com/tom-pollak) 58 | - Add table iter; Add `set_classes` and `get_tables` ([#51](https://github.com/AnswerDotAI/fastlite/issues/51)) 59 | - 
Add `table.fetchone` ([#50](https://github.com/AnswerDotAI/fastlite/issues/50)) 60 | 61 | 62 | ## 0.1.1 63 | 64 | ### Breaking Changes 65 | 66 | - fastlite has been rewritten to now use apsw instead of sqlite3 ([#47](https://github.com/AnswerDotAI/fastlite/pull/47)), thanks to [@pydanny](https://github.com/pydanny) 67 | - The key driver of this is that we found major concurrency and performance regressions in the python 3.12 sqlite3 module. However, there are many other good reasons also to switch to apsw. The python stdlib sqlite3 module is designed to focus on compatibility with the Python DB API, where apsw is designed to focus on compatibility with sqlite itself. We have found in production applications that with apsw's design it is far easier to get good performance and reliability compared to the stdlib module. 68 | 69 | ### New Features 70 | 71 | - Use new `sqlite_minutils.Table.result` attribute ([#45](https://github.com/AnswerDotAI/fastlite/pull/45)), thanks to [@pydanny](https://github.com/pydanny) 72 | - Make `get_last` defensive ([#39](https://github.com/AnswerDotAI/fastlite/pull/39)), thanks to [@pydanny](https://github.com/pydanny) 73 | - Rewrite insert() function to take advantage of RETURNING data ([#37](https://github.com/AnswerDotAI/fastlite/pull/37)), thanks to [@pydanny](https://github.com/pydanny) 74 | 75 | ### Bugs Squashed 76 | 77 | - Table.insert() with Falsy value generates an error ([#42](https://github.com/AnswerDotAI/fastlite/issues/42)) 78 | 79 | 80 | ## 0.0.13 81 | 82 | ### New Features 83 | 84 | - Add `xtra` param to all methods that use `xtra` instance var ([#34](https://github.com/AnswerDotAI/fastlite/issues/34)) 85 | 86 | 87 | ## 0.0.12 88 | 89 | ### Breaking Changes 90 | 91 | - Bump sqlite-minutils dependency to use sqlite transactions 92 | 93 | ### New Features 94 | 95 | - add enum support ([#32](https://github.com/AnswerDotAI/fastlite/pull/32)), thanks to [@hamelsmu](https://github.com/hamelsmu) 96 | 97 | 98 | ## 0.0.11 99 | 100 
| ### New Features 101 | 102 | - add `import_file` ([#24](https://github.com/AnswerDotAI/fastlite/issues/24)) 103 | 104 | 105 | ## 0.0.10 106 | 107 | ### New Features 108 | 109 | - add markdown to doc output ([#22](https://github.com/AnswerDotAI/fastlite/issues/22)) 110 | - Use fastcore asdict instead of dataclasses asdict ([#17](https://github.com/AnswerDotAI/fastlite/pull/17)), thanks to [@pydanny](https://github.com/pydanny) 111 | 112 | ### Bugs Squashed 113 | 114 | - Fix `__contains__` on tuple pk searches ([#20](https://github.com/AnswerDotAI/fastlite/pull/20)), thanks to [@pydanny](https://github.com/pydanny) 115 | - Compound primary keys fail on `__contains__` when done with tuple ([#19](https://github.com/AnswerDotAI/fastlite/issues/19)) 116 | 117 | 118 | ## 0.0.9 119 | 120 | ### New Features 121 | 122 | - sqlite-minutil 3.37 compatibility 123 | 124 | 125 | ## 0.0.8 126 | 127 | ### New Features 128 | 129 | - Use flexiclass ([#16](https://github.com/AnswerDotAI/fastlite/issues/16)) 130 | - Add `select` param ([#15](https://github.com/AnswerDotAI/fastlite/issues/15)) 131 | 132 | 133 | ## 0.0.7 134 | 135 | ### New Features 136 | 137 | - `Database.create` for creating a table from a class ([#12](https://github.com/AnswerDotAI/fastlite/issues/12)) 138 | 139 | 140 | ## 0.0.6 141 | 142 | 143 | ### Bugs Squashed 144 | 145 | - Fix `None` checks in fastlite.kw ([#11](https://github.com/AnswerDotAI/fastlite/issues/11)) 146 | 147 | 148 | ## 0.0.5 149 | 150 | ### New Features 151 | 152 | - Switch to sqlite-minutils ([#10](https://github.com/AnswerDotAI/fastlite/issues/10)) 153 | 154 | 155 | ## 0.0.4 156 | 157 | ### New Features 158 | 159 | - Filter table callable using `xtra` ([#9](https://github.com/AnswerDotAI/fastlite/issues/9)) 160 | 161 | 162 | ## 0.0.3 163 | 164 | ### New Features 165 | 166 | - Add `ids_and_rows_where` and use it to work around sqlite-utils rowid bug ([#6](https://github.com/AnswerDotAI/fastlite/issues/6)) 167 | - Add `get_last` and use it to set 
`last_pk` correctly and return updated row ([#5](https://github.com/AnswerDotAI/fastlite/issues/5)) 168 | 169 | 170 | ## 0.0.2 171 | 172 | ### New Features 173 | 174 | - `xtra` field support 175 | - Auto-get pks for `update`, and return updated record ([#4](https://github.com/AnswerDotAI/fastlite/issues/4)) 176 | - Return updated value on insert/upsert ([#3](https://github.com/AnswerDotAI/fastlite/issues/3)) 177 | 178 | 179 | ## 0.0.1 180 | 181 | - Initial release 182 | 183 | -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-28-output-1.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | G 11 | 12 | 13 | 14 | Artist 15 | 16 | 17 | Artist 18 | 19 | 20 | ArtistId 🔑 21 | 22 | Name 23 | 24 | 25 | 26 | 27 | Album 28 | 29 | 30 | Album 31 | 32 | 33 | AlbumId 🔑 34 | 35 | Title 36 | 37 | ArtistId 38 | 39 | 40 | 41 | 42 | Album:ArtistId->Artist:ArtistId 43 | 44 | 45 | 46 | 47 | 48 | Track 49 | 50 | 51 | Track 52 | 53 | 54 | TrackId 🔑 55 | 56 | Name 57 | 58 | AlbumId 59 | 60 | MediaTypeId 61 | 62 | GenreId 63 | 64 | Composer 65 | 66 | Milliseconds 67 | 68 | Bytes 69 | 70 | UnitPrice 71 | 72 | 73 | 74 | 75 | Track:AlbumId->Album:AlbumId 76 | 77 | 78 | 79 | 80 | 81 | Genre 82 | 83 | 84 | Genre 85 | 86 | 87 | GenreId 🔑 88 | 89 | Name 90 | 91 | 92 | 93 | 94 | Track:GenreId->Genre:GenreId 95 | 96 | 97 | 98 | 99 | 100 | MediaType 101 | 102 | 103 | MediaType 104 | 105 | 106 | MediaTypeId 🔑 107 | 108 | Name 109 | 110 | 111 | 112 | 113 | Track:MediaTypeId->MediaType:MediaTypeId 114 | 115 | 116 | 117 | 118 | 119 | -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-32-output-1.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | G 11 | 12 | 13 | 14 | Artist 15 | 16 | 17 | Artist 18 | 19 | 20 | ArtistId 🔑 21 | 22 | Name 23 | 24 | 25 | 
26 | 27 | Album 28 | 29 | 30 | Album 31 | 32 | 33 | AlbumId 🔑 34 | 35 | Title 36 | 37 | ArtistId 38 | 39 | 40 | 41 | 42 | Album:ArtistId->Artist:ArtistId 43 | 44 | 45 | 46 | 47 | 48 | Track 49 | 50 | 51 | Track 52 | 53 | 54 | TrackId 🔑 55 | 56 | Name 57 | 58 | AlbumId 59 | 60 | MediaTypeId 61 | 62 | GenreId 63 | 64 | Composer 65 | 66 | Milliseconds 67 | 68 | Bytes 69 | 70 | UnitPrice 71 | 72 | 73 | 74 | 75 | Track:AlbumId->Album:AlbumId 76 | 77 | 78 | 79 | 80 | 81 | Genre 82 | 83 | 84 | Genre 85 | 86 | 87 | GenreId 🔑 88 | 89 | Name 90 | 91 | 92 | 93 | 94 | Track:GenreId->Genre:GenreId 95 | 96 | 97 | 98 | 99 | 100 | MediaType 101 | 102 | 103 | MediaType 104 | 105 | 106 | MediaTypeId 🔑 107 | 108 | Name 109 | 110 | 111 | 112 | 113 | Track:MediaTypeId->MediaType:MediaTypeId 114 | 115 | 116 | 117 | 118 | 119 | -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-46-output-1.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | G 11 | 12 | 13 | 14 | Artist 15 | 16 | 17 | Artist 18 | 19 | 20 | ArtistId 🔑 21 | 22 | Name 23 | 24 | 25 | 26 | 27 | Album 28 | 29 | 30 | Album 31 | 32 | 33 | AlbumId 🔑 34 | 35 | Title 36 | 37 | ArtistId 38 | 39 | 40 | 41 | 42 | Album:ArtistId->Artist:ArtistId 43 | 44 | 45 | 46 | 47 | 48 | Track 49 | 50 | 51 | Track 52 | 53 | 54 | TrackId 🔑 55 | 56 | Name 57 | 58 | AlbumId 59 | 60 | MediaTypeId 61 | 62 | GenreId 63 | 64 | Composer 65 | 66 | Milliseconds 67 | 68 | Bytes 69 | 70 | UnitPrice 71 | 72 | 73 | 74 | 75 | Track:AlbumId->Album:AlbumId 76 | 77 | 78 | 79 | 80 | 81 | Genre 82 | 83 | 84 | Genre 85 | 86 | 87 | GenreId 🔑 88 | 89 | Name 90 | 91 | 92 | 93 | 94 | Track:GenreId->Genre:GenreId 95 | 96 | 97 | 98 | 99 | 100 | MediaType 101 | 102 | 103 | MediaType 104 | 105 | 106 | MediaTypeId 🔑 107 | 108 | Name 109 | 110 | 111 | 112 | 113 | Track:MediaTypeId->MediaType:MediaTypeId 114 | 115 | 116 | 117 | 118 | 119 | 
-------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-50-output-1.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | G 11 | 12 | 13 | 14 | Artist 15 | 16 | 17 | Artist 18 | 19 | 20 | ArtistId 🔑 21 | 22 | Name 23 | 24 | 25 | 26 | 27 | Album 28 | 29 | 30 | Album 31 | 32 | 33 | AlbumId 🔑 34 | 35 | Title 36 | 37 | ArtistId 38 | 39 | 40 | 41 | 42 | Album:ArtistId->Artist:ArtistId 43 | 44 | 45 | 46 | 47 | 48 | Track 49 | 50 | 51 | Track 52 | 53 | 54 | TrackId 🔑 55 | 56 | Name 57 | 58 | AlbumId 59 | 60 | MediaTypeId 61 | 62 | GenreId 63 | 64 | Composer 65 | 66 | Milliseconds 67 | 68 | Bytes 69 | 70 | UnitPrice 71 | 72 | 73 | 74 | 75 | Track:AlbumId->Album:AlbumId 76 | 77 | 78 | 79 | 80 | 81 | Genre 82 | 83 | 84 | Genre 85 | 86 | 87 | GenreId 🔑 88 | 89 | Name 90 | 91 | 92 | 93 | 94 | Track:GenreId->Genre:GenreId 95 | 96 | 97 | 98 | 99 | 100 | MediaType 101 | 102 | 103 | MediaType 104 | 105 | 106 | MediaTypeId 🔑 107 | 108 | Name 109 | 110 | 111 | 112 | 113 | Track:MediaTypeId->MediaType:MediaTypeId 114 | 115 | 116 | 117 | 118 | 119 | -------------------------------------------------------------------------------- /nbs/test_update.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "fd325418", 6 | "metadata": {}, 7 | "source": [ 8 | "# Test Update Operations" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "417f2c4e", 14 | "metadata": {}, 15 | "source": [ 16 | "## Setup" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "ad470f25", 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "from fastlite import *\n", 27 | "from dataclasses import is_dataclass" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "id": "e4788661", 33 | "metadata": {}, 34 | "source": [ 35 | "Note: Make sure 
to use fastlite's `database()` here" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "id": "97dd1b48", 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "db = database(':memory:')" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "id": "5102a3ac", 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "class People: id: int; name: str" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "id": "9188c149", 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "people = db.create(People, pk='id')" 66 | ] 67 | }, 68 | { 69 | "cell_type": "markdown", 70 | "id": "6c99cbae", 71 | "metadata": {}, 72 | "source": [ 73 | "## Test Single Updates" 74 | ] 75 | }, 76 | { 77 | "cell_type": "markdown", 78 | "id": "dbc67ac6", 79 | "metadata": {}, 80 | "source": [ 81 | "Here we test `update()`" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "id": "a0673d88", 87 | "metadata": {}, 88 | "source": [ 89 | "### Test Cases for `update()` Where Nothing Is Updated" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "id": "eb45e038", 95 | "metadata": {}, 96 | "source": [ 97 | "Test that calling `update()` without any parameters doesn't change anything, and returns nothing" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "id": "fba0c4f7", 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "data": { 108 | "text/plain": [ 109 | "{}" 110 | ] 111 | }, 112 | "execution_count": null, 113 | "metadata": {}, 114 | "output_type": "execute_result" 115 | } 116 | ], 117 | "source": [ 118 | "people.update()" 119 | ] 120 | }, 121 | { 122 | "cell_type": "markdown", 123 | "id": "0355fe0a", 124 | "metadata": {}, 125 | "source": [ 126 | "Test None doesn't change anything."
127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "id": "ace59c88", 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "count = people.count\n", 137 | "assert people.update(None) == {}\n", 138 | "assert people.count == count" 139 | ] 140 | }, 141 | { 142 | "cell_type": "markdown", 143 | "id": "2ab1795b", 144 | "metadata": {}, 145 | "source": [ 146 | "Test empty dict doesn't change anything " 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "id": "a93ec70a", 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "count = people.count\n", 157 | "assert people.update({}) == {}\n", 158 | "assert people.count == count" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": null, 164 | "id": "79cd5186", 165 | "metadata": {}, 166 | "outputs": [], 167 | "source": [ 168 | "# Test empty dataclass doesn't change anything\n", 169 | "PersonDC = people.dataclass()\n", 170 | "count = people.count\n", 171 | "assert people.update(PersonDC()) == {}\n", 172 | "assert people.count == count" 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": null, 178 | "id": "aa988175", 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "# Test empty class instance doesn't change anything\n", 183 | "class EmptyPerson: pass\n", 184 | "count = people.count\n", 185 | "assert people.update(EmptyPerson()) == {}\n", 186 | "assert people.count == count" 187 | ] 188 | }, 189 | { 190 | "cell_type": "markdown", 191 | "id": "811bc666", 192 | "metadata": {}, 193 | "source": [ 194 | "### Single Update Types" 195 | ] 196 | }, 197 | { 198 | "cell_type": "markdown", 199 | "id": "157baebb", 200 | "metadata": {}, 201 | "source": [ 202 | "Test update with `dict`. 
Result should include the Updated value" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": null, 208 | "id": "1fdd0aaf", 209 | "metadata": {}, 210 | "outputs": [], 211 | "source": [ 212 | "person = people.insert(name='Alice')\n", 213 | "adict = dict(id=person.id, name='Bob')\n", 214 | "assert people.update(adict).name == 'Bob'\n", 215 | "assert people[person.id].name == 'Bob'" 216 | ] 217 | }, 218 | { 219 | "cell_type": "markdown", 220 | "id": "d58b023f", 221 | "metadata": {}, 222 | "source": [ 223 | "Fetch record from database to confirm it has changed" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "id": "e5753017", 230 | "metadata": {}, 231 | "outputs": [], 232 | "source": [ 233 | "assert people[person.id].name == 'Bob'" 234 | ] 235 | }, 236 | { 237 | "cell_type": "markdown", 238 | "id": "447e13c9", 239 | "metadata": {}, 240 | "source": [ 241 | "Test update with dataclass" 242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": null, 247 | "id": "c736aa0f", 248 | "metadata": {}, 249 | "outputs": [], 250 | "source": [ 251 | "dc = People(id=person.id, name='Bobby')\n", 252 | "assert is_dataclass(dc) is True\n", 253 | "assert people.update(dc).name == 'Bobby'\n", 254 | "assert people[person.id].name == 'Bobby'" 255 | ] 256 | }, 257 | { 258 | "cell_type": "markdown", 259 | "id": "0b4eb6df", 260 | "metadata": {}, 261 | "source": [ 262 | "Test with regular class" 263 | ] 264 | }, 265 | { 266 | "cell_type": "code", 267 | "execution_count": null, 268 | "id": "cfd90ab0", 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [ 272 | "class Student: pass\n", 273 | "student = Student()\n", 274 | "student.name = 'Charlo'\n", 275 | "student.id = person.id\n", 276 | "\n", 277 | "assert people.update(student).name == 'Charlo'\n", 278 | "assert people[student.id].name == 'Charlo'" 279 | ] 280 | }, 281 | { 282 | "cell_type": "markdown", 283 | "id": "26a9c38a", 284 | "metadata": {}, 285 
| "source": [ 286 | "### None and Empty String Handling" 287 | ] 288 | }, 289 | { 290 | "cell_type": "markdown", 291 | "id": "9abadc7e", 292 | "metadata": {}, 293 | "source": [ 294 | "SQLite makes a clear distinction between NULL (represented as None in Python) and an empty string (''). Unlike some popular Python ORMs, fastlite preserves this distinction because:\n", 295 | "\n", 296 | "1. NULL represents \"unknown\" or \"missing\" data\n", 297 | "2. Empty string represents \"known to be empty\"\n", 298 | "\n", 299 | "These are semantically different concepts, and maintaining this distinction allows users to make appropriate queries (e.g. `WHERE name IS NULL` vs `WHERE name = ''`). The fact that fastlite preserves this distinction in both directions (Python->SQLite and SQLite->Python) is good database design." 300 | ] 301 | }, 302 | { 303 | "cell_type": "markdown", 304 | "id": "37ad998d", 305 | "metadata": {}, 306 | "source": [ 307 | "Test updating a record with name set to None" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": null, 313 | "id": "5a968d13", 314 | "metadata": {}, 315 | "outputs": [], 316 | "source": [ 317 | "result = people.update(dict(id=person.id, name=None))\n", 318 | "assert result.name is None\n", 319 | "assert people[person.id].name == None" 320 | ] 321 | }, 322 | { 323 | "cell_type": "markdown", 324 | "id": "dd0c180d", 325 | "metadata": {}, 326 | "source": [ 327 | "Test with empty string" 328 | ] 329 | }, 330 | { 331 | "cell_type": "code", 332 | "execution_count": null, 333 | "id": "92d53608", 334 | "metadata": {}, 335 | "outputs": [], 336 | "source": [ 337 | "result = people.update(dict(id=person.id, name=''))\n", 338 | "assert result.name == ''\n", 339 | "assert people[person.id].name == ''" 340 | ] 341 | }, 342 | { 343 | "cell_type": "markdown", 344 | "id": "d855c6a8", 345 | "metadata": {}, 346 | "source": [ 347 | "### Other Cases" 348 | ] 349 | }, 350 | { 351 | "cell_type": "markdown", 352 | "id": "1ee61d32", 
353 | "metadata": {}, 354 | "source": [ 355 | "Test with special characters" 356 | ] 357 | }, 358 | { 359 | "cell_type": "code", 360 | "execution_count": null, 361 | "id": "972bab86", 362 | "metadata": {}, 363 | "outputs": [], 364 | "source": [ 365 | "assert people.update(dict(id=person.id, name='O\\'Connor')).name == \"O'Connor\"\n", 366 | "assert people[person.id].name == \"O'Connor\"\n", 367 | "assert people.update(dict(id=person.id, name='José')).name == \"José\"\n", 368 | "assert people[person.id].name == \"José\"" 369 | ] 370 | }, 371 | { 372 | "cell_type": "markdown", 373 | "id": "f1209e4b", 374 | "metadata": {}, 375 | "source": [ 376 | "Test that extra fields raise `fastlite.SQLError`, which is a shim for `apsw.SQLError`:" 377 | ] 378 | }, 379 | { 380 | "cell_type": "code", 381 | "execution_count": null, 382 | "id": "963008b6", 383 | "metadata": {}, 384 | "outputs": [], 385 | "source": [ 386 | "try:\n", 387 | " p = people.update(dict(id=person.id, name='Extra', age=25, title='Dr'))\n", 388 | "except SQLError as e:\n", 389 | " assert e.args[0] == 'no such column: age'" 390 | ] 391 | } 392 | ], 393 | "metadata": { 394 | "kernelspec": { 395 | "display_name": "python3", 396 | "language": "python", 397 | "name": "python3" 398 | } 399 | }, 400 | "nbformat": 4, 401 | "nbformat_minor": 5 402 | } 403 | -------------------------------------------------------------------------------- /fastlite/core.py: -------------------------------------------------------------------------------- 1 | """Source code for fastlite""" 2 | 3 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/00_core.ipynb.
4 | 5 | # %% auto 0 6 | __all__ = ['all_dcs', 'create_mod', 'get_typ', 'diagram'] 7 | 8 | # %% ../nbs/00_core.ipynb 9 | from dataclasses import dataclass, field, make_dataclass, fields, Field, is_dataclass, MISSING 10 | from typing import Any,Union,Optional, get_args 11 | from enum import Enum 12 | from inspect import get_annotations 13 | 14 | from fastcore.utils import * 15 | from fastcore.xml import highlight 16 | from fastcore.xtras import hl_md, dataclass_src 17 | from apswutils.db import * 18 | from apswutils.utils import rows_from_file,TypeTracker,Format 19 | from apswutils.db import NotFoundError 20 | import types 21 | 22 | try: from graphviz import Source 23 | except ImportError: pass 24 | 25 | # %% ../nbs/00_core.ipynb 26 | class _Getter: 27 | "Abstract class with dynamic attributes providing access to DB objects" 28 | def __init__(self, db): self.db = db 29 | # NB: Define `__dir__` in subclass to get list of objects 30 | def __repr__(self): return ", ".join(dir(self)) 31 | def __contains__(self, s): return (s if isinstance(s,str) else s.name) in dir(self) 32 | def __iter__(self): return iter(self[dir(self)]) 33 | def __getitem__(self, idxs): 34 | if isinstance(idxs,str): return self.db.table(idxs) 35 | return [self.db.table(o) for o in idxs] 36 | def __getattr__(self, k): 37 | if k[0]=='_': raise AttributeError 38 | return self.db[k] 39 | 40 | class _TablesGetter(_Getter): 41 | def __dir__(self): return [o for o in self.db.table_names() if not o.startswith('sqlite_')] 42 | 43 | @patch(as_prop=True) 44 | def t(self:Database): return _TablesGetter(self) 45 | 46 | # %% ../nbs/00_core.ipynb 47 | class _Col: 48 | def __init__(self, t, c): self.t,self.c = t,c 49 | def __str__(self): return f'"{self.t}"."{self.c}"' 50 | def __repr__(self): return self.c 51 | def __iter__(self): return iter(self.c) 52 | 53 | class _ColsGetter: 54 | def __init__(self, tbl): self.tbl = tbl 55 | def __dir__(self): return map(repr, self()) 56 | def __call__(self): return 
[_Col(self.tbl.name,o.name) for o in self.tbl.columns] 57 | def __contains__(self, s): return (s if isinstance(s,str) else s.c) in self.tbl.columns_dict 58 | def __repr__(self): return ", ".join(dir(self)) 59 | 60 | def __getattr__(self, k): 61 | if k[0]=='_': raise AttributeError 62 | return _Col(self.tbl.name, k) 63 | 64 | @patch(as_prop=True) 65 | def c(self:Table): return _ColsGetter(self) 66 | 67 | @patch(as_prop=True) 68 | def c(self:View): return _ColsGetter(self) 69 | 70 | # %% ../nbs/00_core.ipynb 71 | @patch 72 | def __str__(self:Table): return f'"{self.name}"' 73 | 74 | @patch 75 | def __str__(self:View): return f'"{self.name}"' 76 | 77 | # %% ../nbs/00_core.ipynb 78 | @patch 79 | def q(self:Database, sql: str, params=None): 80 | return list(self.query(sql, params=params)) 81 | 82 | # %% ../nbs/00_core.ipynb 83 | def _get_flds(tbl): 84 | return [(k, v|None, field(default=UNSET)) 85 | for k,v in tbl.columns_dict.items()] 86 | 87 | def _dataclass(self:Table, store=True, suf='')->type: 88 | "Create a `dataclass` with the types and defaults of this table" 89 | res = make_dataclass(self.name.title()+suf, _get_flds(self)) 90 | flexiclass(res) 91 | if store: self.cls = res 92 | return res 93 | 94 | Table.dataclass = _dataclass 95 | 96 | # %% ../nbs/00_core.ipynb 97 | def all_dcs(db, with_views=False, store=True, suf=''): 98 | "dataclasses for all objects in `db`" 99 | return [o.dataclass(store=store, suf=suf) for o in list(db.t) + (db.views if with_views else [])] 100 | 101 | # %% ../nbs/00_core.ipynb 102 | def create_mod(db, mod_fn, with_views=False, store=True, suf=''): 103 | "Create module for dataclasses for `db`" 104 | mod_fn = str(mod_fn) 105 | if not mod_fn.endswith('.py'): mod_fn+='.py' 106 | dcs = all_dcs(db, with_views, store=store, suf=suf) 107 | strlist = ', '.join([f'"{o.__name__}"' for o in dcs]) 108 | with open(mod_fn, 'w') as f: 109 | print(f'__all__ = [{strlist}]', file=f) 110 | print('from dataclasses import dataclass', file=f) 111 | 
print('from fastlite.kw import UNSET', file=f) 112 | for o in dcs: print(dataclass_src(o), file=f) 113 | 114 | # %% ../nbs/00_core.ipynb 115 | @patch 116 | def link_dcs(self:Database, mod): 117 | "Set the internal dataclass type links for tables using `mod` (created via `create_mod`)" 118 | for o in mod.__all__: self.t[o.lower()].cls = getattr(mod, o) 119 | 120 | # %% ../nbs/00_core.ipynb 121 | @patch 122 | def __call__( 123 | self:(Table|View), 124 | where:str|None=None, # SQL where fragment to use, for example `id > ?` 125 | where_args: Iterable|dict|NoneType=None, # Parameters to use with `where`; iterable for `id>?`, or dict for `id>:id` 126 | order_by: str|None=None, # Column or fragment of SQL to order by 127 | limit:int|None=None, # Number of rows to limit to 128 | offset:int|None=None, # SQL offset 129 | select:str = "*", # Comma-separated list of columns to select 130 | with_pk:bool=False, # Return tuple of (pk,row)? 131 | as_cls:bool=True, # Convert returned dict to stored dataclass? 
132 | xtra:dict|None=None, # Extra constraints 133 | **kwargs)->list: 134 | "Shortcut for `rows_where` or `pks_and_rows_where`, depending on `with_pk`" 135 | f = getattr(self, 'pks_and_rows_where' if with_pk else 'rows_where') 136 | if not xtra: xtra = getattr(self, 'xtra_id', {}) 137 | if xtra: 138 | xw = ' and '.join(f"[{k}] = {v!r}" for k,v in xtra.items()) 139 | where = f'{xw} and {where}' if where else xw 140 | res = f(where=where, where_args=where_args, order_by=order_by, limit=limit, offset=offset, select=select, **kwargs) 141 | if as_cls and hasattr(self,'cls'): 142 | if with_pk: res = ((k,self.cls(**v)) for k,v in res) 143 | else: res = (self.cls(**o) for o in res) 144 | return list(res) 145 | 146 | # %% ../nbs/00_core.ipynb 147 | @patch 148 | def selectone( 149 | self:(Table|View), 150 | where:str|None=None, # SQL where fragment to use, for example `id > ?` 151 | where_args: Iterable|dict|NoneType=None, # Parameters to use with `where`; iterable for `id>?`, or dict for `id>:id` 152 | select:str = "*", # Comma-separated list of columns to select 153 | as_cls:bool=True, # Convert returned dict to stored dataclass? 
154 | xtra:dict|None=None, # Extra constraints 155 | **kwargs)->list: 156 | "Shortcut for `__call__` that returns exactly one item" 157 | res = self(where=where, where_args=where_args, select=select, as_cls=as_cls, xtra=xtra, limit=2) 158 | if len(res)==0: raise NotFoundError 159 | elif len(res) > 1: raise ValueError(f"Not unique: {len(res)} results") 160 | return res[0] 161 | 162 | # %% ../nbs/00_core.ipynb 163 | @patch 164 | def set_classes(self:Database, glb): 165 | "Add set all table dataclasses using types in namespace `glb`" 166 | for tbl in self.t: tbl.cls = glb[tbl.name.title()] 167 | 168 | # %% ../nbs/00_core.ipynb 169 | @patch 170 | def get_tables(self:Database, glb): 171 | "Add objects for all table objects to namespace `glb`" 172 | for tbl in self.t: glb[tbl.name.lower()+'s'] = tbl 173 | 174 | # %% ../nbs/00_core.ipynb 175 | class _ViewsGetter(_Getter): 176 | def __dir__(self): return self.db.view_names() 177 | 178 | @patch(as_prop=True) 179 | def v(self:Database): return _ViewsGetter(self) 180 | 181 | # %% ../nbs/00_core.ipynb 182 | def _parse_typ(t): return t if not (_args:= get_args(t)) else first(_args, bool) 183 | 184 | # %% ../nbs/00_core.ipynb 185 | def _is_enum(o): return isinstance(o, type) and issubclass(o, Enum) 186 | def _enum_types(e): return {type(v.value) for v in e} 187 | 188 | def get_typ(t): 189 | "Get the underlying type." 
190 | t = _parse_typ(t) # incase Union[Enum,None] 191 | if _is_enum(t) and len(types:=_enum_types(t)) == 1: return first(types) 192 | return t 193 | 194 | # %% ../nbs/00_core.ipynb 195 | @patch 196 | def create( 197 | self: Database, 198 | cls=None, # Dataclass to create table from 199 | name=None, # Name of table to create 200 | pk='id', # Column(s) to use as a primary key 201 | foreign_keys=None, # Foreign key definitions 202 | defaults=None, # Database table defaults 203 | column_order=None, # Which columns should come first 204 | not_null=None, # Columns that should be created as ``NOT NULL`` 205 | hash_id=None, # Column to be used as a primary key using hash 206 | hash_id_columns=None, # Columns used when calculating hash 207 | extracts=None, # Columns to be extracted during inserts 208 | if_not_exists=False, # Use `CREATE TABLE IF NOT EXISTS` 209 | replace=False, # Drop and replace table if it already exists 210 | ignore=True, # Silently do nothing if table already exists 211 | transform=False, # If table exists transform it to fit schema 212 | strict=False, # Apply STRICT mode to table 213 | ): 214 | "Create table from `cls`, default name to snake-case version of class name" 215 | flexiclass(cls) 216 | if name is None: name = camel2snake(cls.__name__) 217 | typs = {o.name: get_typ(o.type) for o in fields(cls)} 218 | res = self.create_table( 219 | name, typs, defaults=defaults, 220 | pk=pk, foreign_keys=foreign_keys, column_order=column_order, not_null=not_null, 221 | hash_id=hash_id, hash_id_columns=hash_id_columns, extracts=extracts, transform=transform, 222 | if_not_exists=if_not_exists, replace=replace, ignore=ignore, strict=strict) 223 | res.cls = cls 224 | return res 225 | 226 | # %% ../nbs/00_core.ipynb 227 | @patch 228 | def import_file(self:Database, table_name, file, format=None, pk=None, alter=False): 229 | "Import path or handle `file` to new table `table_name`" 230 | if isinstance(file, str): file = file.encode() 231 | if isinstance(file, bytes): 
file = io.BytesIO(file) 232 | with maybe_open(file) as fp: rows, format_used = rows_from_file(fp, format=format) 233 | tracker = TypeTracker() 234 | rows = tracker.wrap(rows) 235 | tbl = self[table_name] 236 | tbl.insert_all(rows, alter=alter) 237 | tbl.transform(types=tracker.types) 238 | if pk: tbl.transform(pk=pk) 239 | return tbl 240 | 241 | # %% ../nbs/00_core.ipynb 242 | def _edge(tbl): 243 | return "\n".join(f"{fk.table}:{fk.column} -> {fk.other_table}:{fk.other_column};" 244 | for fk in tbl.foreign_keys) 245 | 246 | def _row(col): 247 | xtra = " 🔑" if col.is_pk else "" 248 | bg = ' bgcolor="#ffebcd"' if col.is_pk else "" 249 | return f' {col.name}{xtra}' 250 | 251 | def _tnode(tbl): 252 | rows = "\n".join(_row(o) for o in tbl.columns) 253 | res = f""" 254 | 255 | {rows} 256 |
{tbl.name}
""" 257 | return f"{tbl.name} [label=<{res}>];\n" 258 | 259 | # %% ../nbs/00_core.ipynb 260 | def diagram(tbls, ratio=0.7, size="10", neato=False, render=True): 261 | layout = "\nlayout=neato;\noverlap=prism;\noverlap_scaling=0.5;""" if neato else "" 262 | edges = "\n".join(map(_edge, tbls)) 263 | tnodes = "\n".join(map(_tnode, tbls)) 264 | 265 | res = f"""digraph G {{ 266 | rankdir=LR;{layout} 267 | size="{size}"; 268 | ratio={ratio}; 269 | node [shape=plaintext] 270 | 271 | {tnodes} 272 | 273 | {edges} 274 | }} 275 | """ 276 | return Source(res) if render else res 277 | -------------------------------------------------------------------------------- /nbs/test_upsert.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "fd325418", 6 | "metadata": {}, 7 | "source": [ 8 | "# Test Upsert Operations" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "417f2c4e", 14 | "metadata": {}, 15 | "source": [ 16 | "## Setup" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "ad470f25", 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "from fastlite import *" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "id": "e4788661", 32 | "metadata": {}, 33 | "source": [ 34 | "Note: Make sure to use fastlite's `database()` here" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "id": "97dd1b48", 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "db = database(':memory:')" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": null, 50 | "id": "5102a3ac", 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "class People: id: int; name: str" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "id": "9188c149", 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "people = db.create(People, pk='id')" 65 | ] 
66 | }, 67 | { 68 | "cell_type": "markdown", 69 | "id": "6c99cbae", 70 | "metadata": {}, 71 | "source": [ 72 | "## Test Single Upserts" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "id": "dbc67ac6", 78 | "metadata": {}, 79 | "source": [ 80 | "Here we test `upsert()`" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "id": "a0673d88", 86 | "metadata": {}, 87 | "source": [ 88 | "### Test Cases for `upsert()` Where Nothing Is Inserted" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "id": "eb45e038", 94 | "metadata": {}, 95 | "source": [ 96 | "Test that calling `upsert()` without any parameters doesn't change anything, and returns nothing" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": null, 102 | "id": "fba0c4f7", 103 | "metadata": {}, 104 | "outputs": [ 105 | { 106 | "data": { 107 | "text/plain": [ 108 | "{}" 109 | ] 110 | }, 111 | "execution_count": null, 112 | "metadata": {}, 113 | "output_type": "execute_result" 114 | } 115 | ], 116 | "source": [ 117 | "people.upsert()" 118 | ] 119 | }, 120 | { 121 | "cell_type": "markdown", 122 | "id": "0355fe0a", 123 | "metadata": {}, 124 | "source": [ 125 | "Test None doesn't change anything." 
126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": null, 131 | "id": "ace59c88", 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "count = people.count\n", 136 | "assert people.upsert(None) == {}\n", 137 | "assert people.count == count" 138 | ] 139 | }, 140 | { 141 | "cell_type": "markdown", 142 | "id": "2ab1795b", 143 | "metadata": {}, 144 | "source": [ 145 | "Test empty dict doesn't change anything " 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "id": "a93ec70a", 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "count = people.count\n", 156 | "assert people.upsert({}) == {}\n", 157 | "assert people.count == count" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "id": "79cd5186", 164 | "metadata": {}, 165 | "outputs": [], 166 | "source": [ 167 | "# Test empty dataclass doesn't change anything\n", 168 | "PersonDC = people.dataclass()\n", 169 | "count = people.count\n", 170 | "assert people.upsert(PersonDC()) == {}\n", 171 | "assert people.count == count" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": null, 177 | "id": "aa988175", 178 | "metadata": {}, 179 | "outputs": [], 180 | "source": [ 181 | "# Test empty class instance doesn't change anything\n", 182 | "class EmptyPerson: pass\n", 183 | "count = people.count\n", 184 | "assert people.upsert(EmptyPerson()) == {}\n", 185 | "assert people.count == count" 186 | ] 187 | }, 188 | { 189 | "cell_type": "markdown", 190 | "id": "811bc666", 191 | "metadata": {}, 192 | "source": [ 193 | "### Single Insert Types" 194 | ] 195 | }, 196 | { 197 | "cell_type": "markdown", 198 | "id": "157baebb", 199 | "metadata": {}, 200 | "source": [ 201 | "Test upsert with keyword argument without id. 
Result should be a MissingPrimaryKey error" 202 | ] 203 | }, 204 | { 205 | "cell_type": "code", 206 | "execution_count": null, 207 | "id": "1fdd0aaf", 208 | "metadata": {}, 209 | "outputs": [ 210 | { 211 | "name": "stdout", 212 | "output_type": "stream", 213 | "text": [ 214 | "Correct throwing of key error\n" 215 | ] 216 | } 217 | ], 218 | "source": [ 219 | "try: people.upsert(name='Alice')\n", 220 | "except (MissingPrimaryKey,KeyError): print('Correct throwing of key error')" 221 | ] 222 | }, 223 | { 224 | "cell_type": "markdown", 225 | "id": "e1300c26", 226 | "metadata": {}, 227 | "source": [ 228 | "Use upsert to insert a new record via a dataclass. Since it can't find the id, it adds the record" 229 | ] 230 | }, 231 | { 232 | "cell_type": "code", 233 | "execution_count": null, 234 | "id": "de73d39a", 235 | "metadata": {}, 236 | "outputs": [], 237 | "source": [ 238 | "person = people.upsert(People(name='Alice', id=people.count+1))" 239 | ] 240 | }, 241 | { 242 | "cell_type": "markdown", 243 | "id": "447e13c9", 244 | "metadata": {}, 245 | "source": [ 246 | "Test upsert that updates with dataclass. Since it can find the id, it updates the record." 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": null, 252 | "id": "c736aa0f", 253 | "metadata": {}, 254 | "outputs": [], 255 | "source": [ 256 | "assert people.upsert(People(name='Bobba', id=person.id)).name == 'Bobba'" 257 | ] 258 | }, 259 | { 260 | "cell_type": "markdown", 261 | "id": "77e6e4c0", 262 | "metadata": {}, 263 | "source": [ 264 | "Use upsert to insert a new record via a class. 
Since it can't find the id, it adds the record" 265 | ] 266 | }, 267 | { 268 | "cell_type": "code", 269 | "execution_count": null, 270 | "id": "dd80748f", 271 | "metadata": {}, 272 | "outputs": [], 273 | "source": [ 274 | "count = people.count\n", 275 | "class Student: pass\n", 276 | "student = Student()\n", 277 | "student.name = 'Daniel Greenfeld'\n", 278 | "student.id = people.count+1\n", 279 | "\n", 280 | "assert people.upsert(student).name == 'Daniel Greenfeld'\n", 281 | "assert people.count == count+1" 282 | ] 283 | }, 284 | { 285 | "cell_type": "markdown", 286 | "id": "0b4eb6df", 287 | "metadata": {}, 288 | "source": [ 289 | "Test upsert that updates with class. Since it can find the id, it updates the record." 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": null, 295 | "id": "cfd90ab0", 296 | "metadata": {}, 297 | "outputs": [], 298 | "source": [ 299 | "count = people.count\n", 300 | "student = Student()\n", 301 | "student.name = 'Daniel Roy Greenfeld'\n", 302 | "student.id = person.id\n", 303 | "\n", 304 | "assert people.upsert(student).name == 'Daniel Roy Greenfeld'\n", 305 | "assert people.count == count" 306 | ] 307 | }, 308 | { 309 | "cell_type": "markdown", 310 | "id": "26a9c38a", 311 | "metadata": {}, 312 | "source": [ 313 | "### None and Empty String Handling" 314 | ] 315 | }, 316 | { 317 | "cell_type": "markdown", 318 | "id": "37ad998d", 319 | "metadata": {}, 320 | "source": [ 321 | "Test upserting a record with name set to None. First assert checks the method result, the second assert tests that the database was altered correctly." 
322 | ] 323 | }, 324 | { 325 | "cell_type": "code", 326 | "execution_count": null, 327 | "id": "5a968d13", 328 | "metadata": {}, 329 | "outputs": [], 330 | "source": [ 331 | "result = people.upsert(People(name=None, id=person.id))\n", 332 | "assert result.name is None\n", 333 | "assert people[person.id].name is None" 334 | ] 335 | }, 336 | { 337 | "cell_type": "markdown", 338 | "id": "dd0c180d", 339 | "metadata": {}, 340 | "source": [ 341 | "Test with empty string." 342 | ] 343 | }, 344 | { 345 | "cell_type": "code", 346 | "execution_count": null, 347 | "id": "92d53608", 348 | "metadata": {}, 349 | "outputs": [], 350 | "source": [ 351 | "result = people.upsert(People(name='', id=person.id))\n", 352 | "assert result.name == ''\n", 353 | "assert people[person.id].name == ''" 354 | ] 355 | }, 356 | { 357 | "cell_type": "markdown", 358 | "id": "d855c6a8", 359 | "metadata": {}, 360 | "source": [ 361 | "### Other Cases" 362 | ] 363 | }, 364 | { 365 | "cell_type": "markdown", 366 | "id": "1ee61d32", 367 | "metadata": {}, 368 | "source": [ 369 | "Test upserts with special characters. Let's do updates first" 370 | ] 371 | }, 372 | { 373 | "cell_type": "code", 374 | "execution_count": null, 375 | "id": "972bab86", 376 | "metadata": {}, 377 | "outputs": [], 378 | "source": [ 379 | "assert people.upsert(People(name='O\\'Connor', id=person.id)).name == \"O'Connor\"\n", 380 | "assert people[person.id].name == \"O'Connor\"\n", 381 | "assert people.upsert(People(name='José', id=person.id)).name == 'José'\n", 382 | "assert people[person.id].name == \"José\"" 383 | ] 384 | }, 385 | { 386 | "cell_type": "markdown", 387 | "id": "b1069ca8", 388 | "metadata": {}, 389 | "source": [ 390 | "Now test special characters with upserts that insert." 
391 | ] 392 | }, 393 | { 394 | "cell_type": "code", 395 | "execution_count": null, 396 | "id": "2b702435", 397 | "metadata": {}, 398 | "outputs": [], 399 | "source": [ 400 | "person = people.upsert(People(name='O\\'Connor', id=people.count+1))\n", 401 | "assert person.name == \"O'Connor\"\n", 402 | "assert people[person.id].name == \"O'Connor\"\n", 403 | "person = people.upsert(People(name='José', id=people.count+1))\n", 404 | "assert person.name == \"José\"\n", 405 | "assert people[person.id].name == \"José\"" 406 | ] 407 | }, 408 | { 409 | "cell_type": "markdown", 410 | "id": "f27e986a", 411 | "metadata": {}, 412 | "source": [ 413 | "Test dict upsert" 414 | ] 415 | }, 416 | { 417 | "cell_type": "code", 418 | "execution_count": null, 419 | "id": "45a4c2aa", 420 | "metadata": {}, 421 | "outputs": [], 422 | "source": [ 423 | "assert people.upsert({'name': 'Dict Test', 'id': person.id}).name == 'Dict Test'" 424 | ] 425 | }, 426 | { 427 | "cell_type": "markdown", 428 | "id": "f1209e4b", 429 | "metadata": {}, 430 | "source": [ 431 | "Test that extra fields raise `fastlite.SQLError`" 432 | ] 433 | }, 434 | { 435 | "cell_type": "code", 436 | "execution_count": null, 437 | "id": "963008b6", 438 | "metadata": {}, 439 | "outputs": [], 440 | "source": [ 441 | "try:\n", 442 | " p = people.upsert(dict(name='Extra', age=25, title='Dr', id=person.id))\n", 443 | "except SQLError as e:\n", 444 | " assert e.args[0] == 'no such column: age'" 445 | ] 446 | } 447 | ], 448 | "metadata": { 449 | "kernelspec": { 450 | "display_name": "python3", 451 | "language": "python", 452 | "name": "python3" 453 | } 454 | }, 455 | "nbformat": 4, 456 | "nbformat_minor": 5 457 | } 458 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND
DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
def database(path, wal=True, flags=None)->Any:
    "Open (or create) an apsw-backed `Database` at `path`, enabling WAL mode by default."
    path = Path(path)
    # Create all missing parent directories, not just the immediate one, so a
    # nested path like `data/db/app.sqlite` works on first use.
    path.parent.mkdir(parents=True, exist_ok=True)
    # `Database` accepts either a path or an existing `Connection`; an explicit
    # `Connection` is only needed when custom apsw open-flags are requested.
    conn_or_path = Connection(str(path), flags=flags) if flags else path
    db = Database(conn_or_path)
    if wal: db.enable_wal()  # write-ahead logging: better concurrent read/write behavior
    return db
@patch
def get_last(self:Table,
             as_cls:bool=True, # Display as Row object
             legacy:bool=True  # If True, use last_rowid. If False, use Table.result attribute
            ):
    "Return the most recently inserted/updated row, setting `self.last_pk` as a side effect."
    if legacy:
        # Explicit raises instead of `assert`: asserts are stripped under `-O`,
        # silently disabling these validity checks.
        if self.last_rowid is None: raise NotFoundError("No last_rowid available")
        row = first(self.rows_where('_rowid_=?', (self.last_rowid,)))
        if not row: raise NotFoundError(f"Couldn't find {self.last_rowid}")
    else:
        # `self.result` holds the rows produced by the last statement; if it is
        # empty there is no "last" row. Return `{}` (the module's convention for
        # no-op writes) rather than letting `row[pk]` below raise a KeyError.
        if not self.result: return {}
        row = self.result[-1]
    vals = [row[pk] for pk in self.pks]
    self.last_pk = vals[0] if len(vals)==1 else vals
    if as_cls and hasattr(self,'cls'): row = self.cls(**row)
    return row
@patch
def get(self:Table, pk_values: list|tuple|str|int, as_cls:bool=True, xtra:dict|None=None, default:Any=UNSET)->Any:
    "Fetch the row whose primary key(s) match `pk_values`, additionally filtered by any `xtra` fields."
    if not isinstance(pk_values, (list, tuple)): pk_values = [pk_values]
    last_pk = pk_values[0] if len(self.pks) == 1 else pk_values
    xtra = xtra or getattr(self, 'xtra_id', {})
    keys = self.pks + list(xtra.keys())
    args = list(pk_values) + list(xtra.values())
    if len(keys)!=len(args): raise NotFoundError(f"Need {len(keys)} pk")
    clause = " and ".join(f"[{k}] = ?" for k in keys)
    found = first(self.ids_and_rows_where(clause, args))
    if not found:
        # No `default` supplied -> signal absence; otherwise hand back the default.
        if default is UNSET: raise NotFoundError()
        return default
    rid,row = found
    # Remember which row was fetched so later calls (e.g. get_last) can use it.
    self.last_pk,self.last_rowid = last_pk,rid
    return self.cls(**row) if as_cls and hasattr(self,'cls') else row

@patch
def __getitem__(self:Table, pk_values):
    "Index with primary key(s): `tbl[pk]` is shorthand for `tbl.get(pk)`."
    return self.get(pk_values)
@patch
def transform(
    self:Table, *,
    types: dict|None=None, rename: dict|None=None, drop: Iterable|None=None, pk: Any|None=DEFAULT,
    not_null: Iterable[str]|None=None, defaults: Dict[str, Any]|None=None,
    drop_foreign_keys: Iterable[str]|None=None, add_foreign_keys: ForeignKeysType|None=None,
    foreign_keys: ForeignKeysType|None=None,
    column_order: List[str]|None=None, keep_table: str|None=None,
    **kwargs) -> Table:
    "Transform this table's schema; extra `kwargs` are merged into `types` (kwargs win on clashes)."
    merged = {**(types or {}), **kwargs}
    return self._orig_transform(
        types=merged, rename=rename, drop=drop, pk=pk, not_null=not_null, defaults=defaults,
        drop_foreign_keys=drop_foreign_keys, add_foreign_keys=add_foreign_keys,
        foreign_keys=foreign_keys, column_order=column_order, keep_table=keep_table)

@patch
def transform_sql(
    self:Table, *,
    types: dict|None=None, rename: dict|None=None, drop: Iterable|None=None, pk: Any|None=DEFAULT,
    not_null: Iterable[str]|None=None, defaults: Dict[str, Any]|None=None,
    drop_foreign_keys: Iterable[str]|None=None, add_foreign_keys: ForeignKeysType|None=None,
    foreign_keys: ForeignKeysType|None=None,
    column_order: List[str]|None=None, keep_table: str|None=None,
    **kwargs) -> List[str]:
    "Like `transform`, but return the SQL statements that would run instead of executing them."
    merged = {**(types or {}), **kwargs}
    return self._orig_transform_sql(
        types=merged, rename=rename, drop=drop, pk=pk, not_null=not_null, defaults=defaults,
        drop_foreign_keys=drop_foreign_keys, add_foreign_keys=add_foreign_keys,
        foreign_keys=foreign_keys, column_order=column_order, keep_table=keep_table)
@patch
def update(self:Table, updates: dict|None=None, pk_values: list|tuple|str|int|float|None=None,
           alter: bool=False, conversions: dict|None=None, xtra:dict|None=None, **kwargs) -> Any:
    "Update the row identified by `pk_values` (or by the pks inside `updates`); return the updated row."
    # First pass: normalise a dict/dataclass/object into a plain dict so it can be merged.
    vals = _process_row(updates or {})
    xtra = xtra or getattr(self, 'xtra_id', {})
    # Second pass after merging: kwargs/xtra may themselves carry UNSET or Enum
    # values, which `_process_row` strips/converts.
    vals = _process_row({**vals, **kwargs, **xtra})
    if not vals: return {}
    if pk_values is None: pk_values = [vals[k] for k in self.pks]
    self._orig_update(pk_values, updates=vals, alter=alter, conversions=conversions)
    return self.get_last(legacy=False)
@patch
def insert(
    self:Table,
    record: Dict[str, Any]=None, pk=DEFAULT, foreign_keys=DEFAULT,
    column_order: Union[List[str], Default, None]=DEFAULT,
    not_null: Union[Iterable[str], Default, None]=DEFAULT,
    defaults: Union[Dict[str, Any], Default, None]=DEFAULT,
    hash_id: Union[str, Default, None]=DEFAULT,
    hash_id_columns: Union[Iterable[str], Default, None]=DEFAULT,
    alter: opt_bool=DEFAULT,
    ignore: opt_bool=DEFAULT,
    replace: opt_bool=DEFAULT,
    extracts: Union[Dict[str, str], List[str], Default, None]=DEFAULT,
    conversions: Union[Dict[str, str], Default, None]=DEFAULT,
    columns: Union[Dict[str, Any], Default, None]=DEFAULT,
    strict: opt_bool=DEFAULT,
    **kwargs) -> Any:
    "Insert `record` (dict/dataclass/object); extra `kwargs` become fields. Returns the inserted row."
    # NOTE(review): `xtra_id` is not merged here; it appears to be applied by the
    # patched `insert_all`, which the underlying insert delegates to — confirm.
    row = {**_process_row(record), **kwargs}
    if not row: return {}   # nothing to write -> no-op, mirroring `update`
    self._orig_insert(
        record=row, pk=pk, foreign_keys=foreign_keys, column_order=column_order, not_null=not_null,
        defaults=defaults, hash_id=hash_id, hash_id_columns=hash_id_columns, alter=alter, ignore=ignore,
        replace=replace, extracts=extracts, conversions=conversions, columns=columns, strict=strict)
    return self.get_last(legacy=False)
@patch
def lookup(
    self:Table,
    lookup_values: Dict[str, Any],
    extra_values: Dict[str, Any]|None=None,
    pk: str|None = "id",
    foreign_keys: ForeignKeysType|None=None,
    column_order: List[str]|None=None,
    not_null: Iterable[str]|None=None,
    defaults: Dict[str, Any]|None=None,
    extracts: Union[Dict[str, str], List[str], None]=None,
    conversions: Dict[str, str]|None=None,
    columns: Dict[str, Any]|None=None,
    strict: bool|None = False,
    **kwargs):
    "Return the pk of the row matching `lookup_values` (+`kwargs`), delegating to the original `lookup`."
    # kwargs are folded into `lookup_values`, overriding on key clashes.
    merged = {**(lookup_values or {}), **kwargs}
    return self._orig_lookup(
        lookup_values=merged, extra_values=extra_values, pk=pk, foreign_keys=foreign_keys,
        column_order=column_order, not_null=not_null, defaults=defaults, extracts=extracts,
        conversions=conversions, columns=columns, strict=strict)
Columns work in a similar way to tables, using the `c` property:
58 | 59 | …or multiple tables at once: 60 | 61 | ``` python 62 | dt['Artist','Album','Track','Genre','MediaType'] 63 | ``` 64 | 65 | [
, 66 |
, 67 |
, 68 |
, 69 |
] 70 | 71 | It also provides auto-complete in Jupyter, IPython, and nearly any other 72 | interactive Python environment: 73 | 74 | 76 | 77 | You can check if a table is in the database already: 78 | 79 | ``` python 80 | 'Artist' in dt 81 | ``` 82 | 83 | True 84 | 85 | Column work in a similar way to tables, using the `c` property: 86 | 87 | ``` python 88 | ac = artist.c 89 | ac 90 | ``` 91 | 92 | ArtistId, Name 93 | 94 | Auto-complete works for columns too: 95 | 96 | 98 | 99 | Columns, tables, and view stringify in a format suitable for including 100 | in SQL statements. That means you can use auto-complete in f-strings. 101 | 102 | ``` python 103 | qry = f"select * from {artist} where {ac.Name} like 'AC/%'" 104 | print(qry) 105 | ``` 106 | 107 | select * from "Artist" where "Artist"."Name" like 'AC/%' 108 | 109 | You can view the results of a select query using `q`: 110 | 111 | ``` python 112 | db.q(qry) 113 | ``` 114 | 115 | [{'ArtistId': 1, 'Name': 'AC/DC'}] 116 | 117 | Views can be accessed through the `v` property: 118 | 119 | ``` python 120 | album = dt.Album 121 | 122 | acca_sql = f"""select {album}.* 123 | from {album} join {artist} using (ArtistId) 124 | where {ac.Name} like 'AC/%'""" 125 | 126 | db.create_view("AccaDaccaAlbums", acca_sql, replace=True) 127 | acca_dacca = db.q(f"select * from {db.v.AccaDaccaAlbums}") 128 | acca_dacca 129 | ``` 130 | 131 | [{'AlbumId': 1, 132 | 'Title': 'For Those About To Rock We Salute You', 133 | 'ArtistId': 1}, 134 | {'AlbumId': 4, 'Title': 'Let There Be Rock', 'ArtistId': 1}] 135 | 136 | ## Dataclass support 137 | 138 | A `dataclass` type with the names, types, and defaults of the tables is 139 | created using `dataclass()`: 140 | 141 | ``` python 142 | album_dc = album.dataclass() 143 | ``` 144 | 145 | Let’s try it: 146 | 147 | ``` python 148 | album_obj = album_dc(**acca_dacca[0]) 149 | album_obj 150 | ``` 151 | 152 | Album(AlbumId=1, Title='For Those About To Rock We Salute You', ArtistId=1) 153 | 154 | You can get 
If you’ve previously called `dataclass()`, returned items will be
There’s lots of params you can 198 | check out, such as `limit`: 199 | 200 | ``` python 201 | album(limit=2) 202 | ``` 203 | 204 | [Album(AlbumId=1, Title='For Those About To Rock We Salute You', ArtistId=1), 205 | Album(AlbumId=2, Title='Balls to the Wall', ArtistId=2)] 206 | 207 | Pass a truthy value as `with_pk` and you’ll get tuples of primary keys 208 | and records: 209 | 210 | ``` python 211 | album(with_pk=1, limit=2) 212 | ``` 213 | 214 | [(1, 215 | Album(AlbumId=1, Title='For Those About To Rock We Salute You', ArtistId=1)), 216 | (2, Album(AlbumId=2, Title='Balls to the Wall', ArtistId=2))] 217 | 218 | Indexing also uses the dataclass by default: 219 | 220 | ``` python 221 | album[5] 222 | ``` 223 | 224 | Album(AlbumId=5, Title='Big Ones', ArtistId=3) 225 | 226 | If you set `xtra` fields, then indexing is also filtered by those. As a 227 | result, for instance in this case, nothing is returned since album 5 is 228 | not created by artist 1: 229 | 230 | ``` python 231 | album.xtra(ArtistId=1) 232 | 233 | try: album[5] 234 | except NotFoundError: print("Not found") 235 | ``` 236 | 237 | Not found 238 | 239 | The same filtering is done when using the table as a callable: 240 | 241 | ``` python 242 | album() 243 | ``` 244 | 245 | [Album(AlbumId=1, Title='For Those About To Rock We Salute You', ArtistId=1), 246 | Album(AlbumId=4, Title='Let There Be Rock', ArtistId=1)] 247 | 248 | ## Core design 249 | 250 | The following methods accept `**kwargs`, passing them along to the first 251 | `dict` param: 252 | 253 | - `create` 254 | - `transform` 255 | - `transform_sql` 256 | - `update` 257 | - `insert` 258 | - `upsert` 259 | - `lookup` 260 | 261 | We can access a table that doesn’t actually exist yet: 262 | 263 | ``` python 264 | cats = dt.cats 265 | cats 266 | ``` 267 | 268 |
If we set `xtra` then the additional fields are used for `insert`,
344 | 345 | Alternatively, you can create a table from a class. If it’s not already 346 | a dataclass, it will be converted into one. In either case, the 347 | dataclass will be created (or modified) so that `None` can be passed to 348 | any field (this is needed to support fields such as automatic row ids). 349 | 350 | ``` python 351 | class Cat: id:int; name:str; weight:float; uid:int 352 | ``` 353 | 354 | ``` python 355 | cats = db.create(Cat) 356 | ``` 357 | 358 | ``` python 359 | hl_md(cats.schema, 'sql') 360 | ``` 361 | 362 | ``` sql 363 | CREATE TABLE [cat] ( 364 | [id] INTEGER PRIMARY KEY, 365 | [name] TEXT, 366 | [weight] FLOAT, 367 | [uid] INTEGER 368 | ) 369 | ``` 370 | 371 | ``` python 372 | cat = Cat(name='咪咪', weight=9) 373 | cats.insert(cat) 374 | ``` 375 | 376 | Cat(id=1, name='咪咪', weight=9.0, uid=None) 377 | 378 | ``` python 379 | cats.drop() 380 | ``` 381 | 382 | ## Manipulating data 383 | 384 | We try to make the following methods as flexible as possible. Wherever 385 | possible, they support Python dictionaries, dataclasses, and classes. 386 | 387 | ### .insert() 388 | 389 | Creates a record. Returns an instance of the updated record. 390 | 391 | Insert using a dictionary. 392 | 393 | ``` python 394 | cats.insert({'name': 'Rex', 'weight': 12.2}) 395 | ``` 396 | 397 | Cat(id=1, name='Rex', weight=12.2, uid=UNSET) 398 | 399 | Insert using a dataclass. 400 | 401 | ``` python 402 | CatDC = cats.dataclass() 403 | cats.insert(CatDC(name='Tom', weight=10.2)) 404 | ``` 405 | 406 | Cat(id=2, name='Tom', weight=10.2) 407 | 408 | Insert using a standard Python class 409 | 410 | ``` python 411 | cat = cats.insert(Cat(name='Jerry', weight=5.2)) 412 | ``` 413 | 414 | ### .update() 415 | 416 | Updates a record using a Python dict, dataclass, or object, and returns 417 | an instance of the updated record. 
418 | 419 | Updating from a Python dict: 420 | 421 | ``` python 422 | cats.update(dict(id=cat.id, name='Jerry', weight=6.2)) 423 | ``` 424 | 425 | Cat(id=3, name='Jerry', weight=6.2) 426 | 427 | Updating from a dataclass: 428 | 429 | ``` python 430 | cats.update(CatDC(id=cat.id, name='Jerry', weight=6.3)) 431 | ``` 432 | 433 | Cat(id=3, name='Jerry', weight=6.3) 434 | 435 | Updating using a class: 436 | 437 | ``` python 438 | cats.update(Cat(id=cat.id, name='Jerry', weight=5.7)) 439 | ``` 440 | 441 | Cat(id=3, name='Jerry', weight=5.7) 442 | 443 | ### .delete() 444 | 445 | Removing data is done by providing the primary key value of the record. 446 | 447 | ``` python 448 | # Farewell Jerry! 449 | cats.delete(cat.id) 450 | ``` 451 | 452 |
453 | 454 | ### Multi-field primary keys 455 | 456 | Pass a collection of strings to create a multi-field pk: 457 | 458 | ``` python 459 | class PetFood: catid:int; food:str; qty:int 460 | petfoods = db.create(PetFood, pk=['catid','food']) 461 | print(petfoods.schema) 462 | ``` 463 | 464 | CREATE TABLE [pet_food] ( 465 | [catid] INTEGER, 466 | [food] TEXT, 467 | [qty] INTEGER, 468 | PRIMARY KEY ([catid], [food]) 469 | ) 470 | 471 | You can index into these using multiple values: 472 | 473 | ``` python 474 | pf = petfoods.insert(PetFood(1, 'tuna', 2)) 475 | petfoods[1,'tuna'] 476 | ``` 477 | 478 | PetFood(catid=1, food='tuna', qty=2) 479 | 480 | Updates work in the usual way: 481 | 482 | ``` python 483 | pf.qty=3 484 | petfoods.update(pf) 485 | ``` 486 | 487 | PetFood(catid=1, food='tuna', qty=3) 488 | 489 | You can also use `upsert` to update if the key exists, or insert 490 | otherwise: 491 | 492 | ``` python 493 | pf.qty=1 494 | petfoods.upsert(pf) 495 | petfoods() 496 | ``` 497 | 498 | [PetFood(catid=1, food='tuna', qty=1)] 499 | 500 | ``` python 501 | pf.food='salmon' 502 | petfoods.upsert(pf) 503 | petfoods() 504 | ``` 505 | 506 | [PetFood(catid=1, food='tuna', qty=1), PetFood(catid=1, food='salmon', qty=1)] 507 | 508 | `delete` takes a tuple of keys: 509 | 510 | ``` python 511 | petfoods.delete((1, 'tuna')) 512 | petfoods() 513 | ``` 514 | 515 | [PetFood(catid=1, food='salmon', qty=1)] 516 | 517 | ## Diagrams 518 | 519 | If you have [graphviz](https://pypi.org/project/graphviz/) installed, 520 | you can create database diagrams. Pass a subset of tables to just 521 | diagram those. You can also adjust the size and aspect ratio. 
522 | 523 | ``` python 524 | diagram(db.t['Artist','Album','Track','Genre','MediaType'], size=8, ratio=0.4) 525 | ``` 526 | 527 | ![](index_files/figure-commonmark/cell-50-output-1.svg) 528 | 529 | ### Importing CSV/TSV/etc 530 | 531 | ------------------------------------------------------------------------ 532 | 533 | source 536 | 537 | ### Database.import_file 538 | 539 | > Database.import_file (table_name, file, format=None, pk=None, 540 | > alter=False) 541 | 542 | *Import path or handle `file` to new table `table_name`* 543 | 544 | You can pass a file name, string, bytes, or open file handle to 545 | `import_file` to import a CSV: 546 | 547 | ``` python 548 | db = Database(":memory:") 549 | csv_data = """id,name,age 550 | 1,Alice,30 551 | 2,Bob,25 552 | 3,Charlie,35""" 553 | 554 | table = db.import_file("people", csv_data) 555 | table() 556 | ``` 557 | 558 | [{'id': 1, 'name': 'Alice', 'age': 30}, 559 | {'id': 2, 'name': 'Bob', 'age': 25}, 560 | {'id': 3, 'name': 'Charlie', 'age': 35}] 561 | -------------------------------------------------------------------------------- /nbs/test_insert.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "fd325418", 6 | "metadata": {}, 7 | "source": [ 8 | "# Test Insert Operations" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "417f2c4e", 14 | "metadata": {}, 15 | "source": [ 16 | "## Setup" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "ad470f25", 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "from fastlite import *" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "id": "e4788661", 32 | "metadata": {}, 33 | "source": [ 34 | "Note: Make sure to use fastlite's `database()` here" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "id": "97dd1b48", 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "db 
= database(':memory:')" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": null, 50 | "id": "5102a3ac", 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "class People: id: int; name: str" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "id": "9188c149", 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "people = db.create(People, pk='id')" 65 | ] 66 | }, 67 | { 68 | "cell_type": "markdown", 69 | "id": "6c99cbae", 70 | "metadata": {}, 71 | "source": [ 72 | "## Test Single Inserts" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "id": "dbc67ac6", 78 | "metadata": {}, 79 | "source": [ 80 | "Here we test `insert()`" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "id": "a0673d88", 86 | "metadata": {}, 87 | "source": [ 88 | "### Test Cases for `insert()` Where Nothing Is Inserted" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "id": "eb45e038", 94 | "metadata": {}, 95 | "source": [ 96 | "Test that calling `insert()` without any parameters doesn't change anything, and returns nothing" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": null, 102 | "id": "fba0c4f7", 103 | "metadata": {}, 104 | "outputs": [ 105 | { 106 | "data": { 107 | "text/plain": [ 108 | "{}" 109 | ] 110 | }, 111 | "execution_count": null, 112 | "metadata": {}, 113 | "output_type": "execute_result" 114 | } 115 | ], 116 | "source": [ 117 | "people.insert()" 118 | ] 119 | }, 120 | { 121 | "cell_type": "markdown", 122 | "id": "0355fe0a", 123 | "metadata": {}, 124 | "source": [ 125 | "Test None doesn't change anything." 
126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": null, 131 | "id": "ace59c88", 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "count = people.count\n", 136 | "assert people.insert(None) == {}\n", 137 | "assert people.count == count" 138 | ] 139 | }, 140 | { 141 | "cell_type": "markdown", 142 | "id": "2ab1795b", 143 | "metadata": {}, 144 | "source": [ 145 | "Test empty dict doesn't change anything " 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "id": "a93ec70a", 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "count = people.count\n", 156 | "assert people.insert({}) == {}\n", 157 | "assert people.count == count" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "id": "79cd5186", 164 | "metadata": {}, 165 | "outputs": [], 166 | "source": [ 167 | "# Test empty dataclass doesn't change anything\n", 168 | "PersonDC = people.dataclass()\n", 169 | "count = people.count\n", 170 | "assert people.insert(PersonDC()) == {}\n", 171 | "assert people.count == count" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": null, 177 | "id": "aa988175", 178 | "metadata": {}, 179 | "outputs": [], 180 | "source": [ 181 | "# Test empty class instance doesn't change anything\n", 182 | "class EmptyPerson: pass\n", 183 | "count = people.count\n", 184 | "assert people.insert(EmptyPerson()) == {}\n", 185 | "assert people.count == count" 186 | ] 187 | }, 188 | { 189 | "cell_type": "markdown", 190 | "id": "811bc666", 191 | "metadata": {}, 192 | "source": [ 193 | "### Single Insert Types" 194 | ] 195 | }, 196 | { 197 | "cell_type": "markdown", 198 | "id": "157baebb", 199 | "metadata": {}, 200 | "source": [ 201 | "Test insert with keyword argument. Result should be the inserted item." 
202 | ] 203 | }, 204 | { 205 | "cell_type": "code", 206 | "execution_count": null, 207 | "id": "1fdd0aaf", 208 | "metadata": {}, 209 | "outputs": [], 210 | "source": [ 211 | "assert people.insert(name='Alice').name == 'Alice'" 212 | ] 213 | }, 214 | { 215 | "cell_type": "markdown", 216 | "id": "447e13c9", 217 | "metadata": {}, 218 | "source": [ 219 | "Test insert with dataclass" 220 | ] 221 | }, 222 | { 223 | "cell_type": "code", 224 | "execution_count": null, 225 | "id": "c736aa0f", 226 | "metadata": {}, 227 | "outputs": [], 228 | "source": [ 229 | "assert people.insert(People(name='Bobba')).name == 'Bobba'" 230 | ] 231 | }, 232 | { 233 | "cell_type": "markdown", 234 | "id": "0b4eb6df", 235 | "metadata": {}, 236 | "source": [ 237 | "Test with regular class" 238 | ] 239 | }, 240 | { 241 | "cell_type": "code", 242 | "execution_count": null, 243 | "id": "cfd90ab0", 244 | "metadata": {}, 245 | "outputs": [], 246 | "source": [ 247 | "class Student: pass\n", 248 | "student = Student()\n", 249 | "student.name = 'Charlo'\n", 250 | "\n", 251 | "assert people.insert(student).name == 'Charlo'" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "id": "38ff8b74", 257 | "metadata": {}, 258 | "source": [ 259 | "Verify count is 3" 260 | ] 261 | }, 262 | { 263 | "cell_type": "code", 264 | "execution_count": null, 265 | "id": "72a25f8d", 266 | "metadata": {}, 267 | "outputs": [], 268 | "source": [ 269 | "assert people.count == 3" 270 | ] 271 | }, 272 | { 273 | "cell_type": "markdown", 274 | "id": "26a9c38a", 275 | "metadata": {}, 276 | "source": [ 277 | "### None and Empty String Handling" 278 | ] 279 | }, 280 | { 281 | "cell_type": "markdown", 282 | "id": "9abadc7e", 283 | "metadata": {}, 284 | "source": [ 285 | "SQLite makes a clear distinction between NULL (represented as None in Python) and an empty string (''). Unlike some popular Python ORMs, fastlite preserves this distinction because:\n", 286 | "\n", 287 | "1. 
NULL represents \"unknown\" or \"missing\" data\n", 288 | "2. Empty string represents \"known to be empty\"\n", 289 | "\n", 290 | "These are semantically different concepts, and maintaining this distinction allows users to make appropriate queries (e.g. `WHERE name IS NULL` vs `WHERE name = ''`). The fact that fastlite preserves this distinction in both directions (Python->SQLite and SQLite->Python) is good database design." 291 | ] 292 | }, 293 | { 294 | "cell_type": "markdown", 295 | "id": "37ad998d", 296 | "metadata": {}, 297 | "source": [ 298 | "Test inserting a record with name set to None" 299 | ] 300 | }, 301 | { 302 | "cell_type": "code", 303 | "execution_count": null, 304 | "id": "5a968d13", 305 | "metadata": {}, 306 | "outputs": [], 307 | "source": [ 308 | "result = people.insert(name=None)\n", 309 | "assert result.name is None" 310 | ] 311 | }, 312 | { 313 | "cell_type": "markdown", 314 | "id": "dd0c180d", 315 | "metadata": {}, 316 | "source": [ 317 | "Test with empty string" 318 | ] 319 | }, 320 | { 321 | "cell_type": "code", 322 | "execution_count": null, 323 | "id": "92d53608", 324 | "metadata": {}, 325 | "outputs": [], 326 | "source": [ 327 | "result = people.insert(name='')\n", 328 | "assert result.name == ''" 329 | ] 330 | }, 331 | { 332 | "cell_type": "code", 333 | "execution_count": null, 334 | "id": "51cb29b1", 335 | "metadata": {}, 336 | "outputs": [], 337 | "source": [ 338 | "assert people.get(pk_values=4).name == None" 339 | ] 340 | }, 341 | { 342 | "cell_type": "markdown", 343 | "id": "46d8230c", 344 | "metadata": {}, 345 | "source": [ 346 | "Remember, `get()` is for getting single items. The following would not work here. 
`pk_values` can be a list only for tables with compound primary keys.\n", 347 | "\n", 348 | "```python\n", 349 | "# people.get(pk_values=[4,5])\n", 350 | "```" 351 | ] 352 | }, 353 | { 354 | "cell_type": "markdown", 355 | "id": "d855c6a8", 356 | "metadata": {}, 357 | "source": [ 358 | "### Other Cases" 359 | ] 360 | }, 361 | { 362 | "cell_type": "markdown", 363 | "id": "1ee61d32", 364 | "metadata": {}, 365 | "source": [ 366 | "Test with special characters" 367 | ] 368 | }, 369 | { 370 | "cell_type": "code", 371 | "execution_count": null, 372 | "id": "972bab86", 373 | "metadata": {}, 374 | "outputs": [], 375 | "source": [ 376 | "assert people.insert(name='O\\'Connor').name == \"O'Connor\"\n", 377 | "assert people.insert(name='José').name == 'José'" 378 | ] 379 | }, 380 | { 381 | "cell_type": "markdown", 382 | "id": "f3261fa3", 383 | "metadata": {}, 384 | "source": [ 385 | "Test id auto-increment" 386 | ] 387 | }, 388 | { 389 | "cell_type": "code", 390 | "execution_count": null, 391 | "id": "55364dd6", 392 | "metadata": {}, 393 | "outputs": [], 394 | "source": [ 395 | "p1 = people.insert(name='Test1')\n", 396 | "p2 = people.insert(name='Test2') \n", 397 | "assert p2.id == p1.id + 1" 398 | ] 399 | }, 400 | { 401 | "cell_type": "markdown", 402 | "id": "f27e986a", 403 | "metadata": {}, 404 | "source": [ 405 | "Test dict insert" 406 | ] 407 | }, 408 | { 409 | "cell_type": "code", 410 | "execution_count": null, 411 | "id": "45a4c2aa", 412 | "metadata": {}, 413 | "outputs": [], 414 | "source": [ 415 | "assert people.insert({'name': 'Dict Test'}).name == 'Dict Test'" 416 | ] 417 | }, 418 | { 419 | "cell_type": "markdown", 420 | "id": "f1209e4b", 421 | "metadata": {}, 422 | "source": [ 423 | "Test that extra fields raise `apsw.SqlError`" 424 | ] 425 | }, 426 | { 427 | "cell_type": "code", 428 | "execution_count": null, 429 | "id": "07c034e9", 430 | "metadata": {}, 431 | "outputs": [], 432 | "source": [ 433 | "from sqlite3 import OperationalError" 434 | ] 435 | }, 436 | { 437 
| "cell_type": "code", 438 | "execution_count": null, 439 | "id": "963008b6", 440 | "metadata": {}, 441 | "outputs": [], 442 | "source": [ 443 | "try:\n", 444 | " p = people.insert(name='Extra', age=25, title='Dr')\n", 445 | "except SQLError as e:\n", 446 | " assert e.args[0] == 'table people has no column named age'" 447 | ] 448 | }, 449 | { 450 | "cell_type": "markdown", 451 | "id": "7d7d252b", 452 | "metadata": {}, 453 | "source": [ 454 | "## Test Multiple Inserts" 455 | ] 456 | }, 457 | { 458 | "cell_type": "markdown", 459 | "id": "04de34fb", 460 | "metadata": {}, 461 | "source": [ 462 | "Here we test `insert_all()`" 463 | ] 464 | }, 465 | { 466 | "cell_type": "markdown", 467 | "id": "46bc9961", 468 | "metadata": {}, 469 | "source": [ 470 | "### Test cases for `insert_all()` where nothing is changed" 471 | ] 472 | }, 473 | { 474 | "cell_type": "markdown", 475 | "id": "eeb1fdda", 476 | "metadata": {}, 477 | "source": [ 478 | "Test empty list doesn't change anything" 479 | ] 480 | }, 481 | { 482 | "cell_type": "code", 483 | "execution_count": null, 484 | "id": "c8a95079", 485 | "metadata": {}, 486 | "outputs": [], 487 | "source": [ 488 | "count = people.count\n", 489 | "people.insert_all([])\n", 490 | "assert people.count == count" 491 | ] 492 | }, 493 | { 494 | "cell_type": "markdown", 495 | "id": "f46e99a2", 496 | "metadata": {}, 497 | "source": [ 498 | "Test other empty iterables don't change anything" 499 | ] 500 | }, 501 | { 502 | "cell_type": "code", 503 | "execution_count": null, 504 | "id": "cee37620", 505 | "metadata": {}, 506 | "outputs": [], 507 | "source": [ 508 | "count = people.count\n", 509 | "people.insert_all(iter([])) # empty iterator\n", 510 | "people.insert_all(set()) # empty set\n", 511 | "people.insert_all(tuple()) # empty tuple\n", 512 | "assert people.count == count" 513 | ] 514 | }, 515 | { 516 | "cell_type": "markdown", 517 | "id": "3dcde075", 518 | "metadata": {}, 519 | "source": [ 520 | "Test that lists of `None` don't change 
anything." 521 | ] 522 | }, 523 | { 524 | "cell_type": "code", 525 | "execution_count": null, 526 | "id": "98118662", 527 | "metadata": {}, 528 | "outputs": [], 529 | "source": [ 530 | "count = people.count\n", 531 | "assert people.insert_all([None, None]) == people\n", 532 | "assert people.result == []\n", 533 | "assert people.count == count" 534 | ] 535 | }, 536 | { 537 | "cell_type": "markdown", 538 | "id": "43a78fc8", 539 | "metadata": {}, 540 | "source": [ 541 | "### Test cases for `insert_all()` where records are inserted" 542 | ] 543 | }, 544 | { 545 | "cell_type": "markdown", 546 | "id": "8677a8a4", 547 | "metadata": {}, 548 | "source": [ 549 | "Test that a list containing both None and a valid records only inserts the valid record." 550 | ] 551 | }, 552 | { 553 | "cell_type": "code", 554 | "execution_count": null, 555 | "id": "96632dfb", 556 | "metadata": {}, 557 | "outputs": [], 558 | "source": [ 559 | "count = people.count\n", 560 | "people.insert_all([None, None, None, None, None, dict(name='Dermot')])\n", 561 | "assert people.count == count + 1" 562 | ] 563 | }, 564 | { 565 | "cell_type": "markdown", 566 | "id": "7d7ea003", 567 | "metadata": {}, 568 | "source": [ 569 | "Test list of dicts" 570 | ] 571 | }, 572 | { 573 | "cell_type": "code", 574 | "execution_count": null, 575 | "id": "b110b0a7", 576 | "metadata": {}, 577 | "outputs": [], 578 | "source": [ 579 | "count = people.count\n", 580 | "data = [{'name': 'Bulk1'}, {'name': 'Bulk2'}, {'name': 'Bulk3'}]\n", 581 | "people.insert_all(data)\n", 582 | "assert people.count == len(data) + count" 583 | ] 584 | }, 585 | { 586 | "cell_type": "markdown", 587 | "id": "d1255a3b", 588 | "metadata": {}, 589 | "source": [ 590 | "Test `insert_all` with a list of dataclass instances to insert" 591 | ] 592 | }, 593 | { 594 | "cell_type": "code", 595 | "execution_count": null, 596 | "id": "803e6bc9", 597 | "metadata": {}, 598 | "outputs": [], 599 | "source": [ 600 | "count = people.count\n", 601 | "Person = 
people.dataclass()\n", 602 | "data = [Person(name=f'DC{i}') for i in range(3)]\n", 603 | "people.insert_all(data)\n", 604 | "assert people.count == count + 3" 605 | ] 606 | }, 607 | { 608 | "cell_type": "markdown", 609 | "id": "8be30bff", 610 | "metadata": {}, 611 | "source": [ 612 | "Test list of regular class instances" 613 | ] 614 | }, 615 | { 616 | "cell_type": "code", 617 | "execution_count": null, 618 | "id": "570d5dce", 619 | "metadata": {}, 620 | "outputs": [], 621 | "source": [ 622 | "count = people.count\n", 623 | "class Student:\n", 624 | " def __init__(self, name): self.name = name\n", 625 | "students = [Student(f'Student{i}') for i in range(3)]\n", 626 | "people.insert_all(students)\n", 627 | "assert people.count == count + 3" 628 | ] 629 | }, 630 | { 631 | "cell_type": "markdown", 632 | "id": "bd68bde0", 633 | "metadata": {}, 634 | "source": [ 635 | "### Edge Cases" 636 | ] 637 | }, 638 | { 639 | "cell_type": "markdown", 640 | "id": "e9ff33c6", 641 | "metadata": {}, 642 | "source": [ 643 | "Test mixed types in list" 644 | ] 645 | }, 646 | { 647 | "cell_type": "code", 648 | "execution_count": null, 649 | "id": "ca76eb12", 650 | "metadata": {}, 651 | "outputs": [], 652 | "source": [ 653 | "count = people.count\n", 654 | "Person = people.dataclass()\n", 655 | "mixed_data = [\n", 656 | " {'name': 'Dict1'},\n", 657 | " Person(name='DC1'),\n", 658 | " Student('Student1')\n", 659 | "]\n", 660 | "people.insert_all(mixed_data)\n", 661 | "assert people.count == count + 3" 662 | ] 663 | }, 664 | { 665 | "cell_type": "markdown", 666 | "id": "76f0e0b4", 667 | "metadata": {}, 668 | "source": [ 669 | "Test None/empty strings in bulk insert" 670 | ] 671 | }, 672 | { 673 | "cell_type": "code", 674 | "execution_count": null, 675 | "id": "5a37e482", 676 | "metadata": {}, 677 | "outputs": [], 678 | "source": [ 679 | "count = people.count\n", 680 | "null_data = [\n", 681 | " {'name': None},\n", 682 | " {'name': ''},\n", 683 | " {'name': 'Regular'}\n", 684 | "]\n", 685 | 
"people.insert_all(null_data)\n", 686 | "assert people.count == count + 3" 687 | ] 688 | }, 689 | { 690 | "cell_type": "markdown", 691 | "id": "c92ada52", 692 | "metadata": {}, 693 | "source": [ 694 | "Test with special characters in bulk" 695 | ] 696 | }, 697 | { 698 | "cell_type": "code", 699 | "execution_count": null, 700 | "id": "da81a215", 701 | "metadata": {}, 702 | "outputs": [], 703 | "source": [ 704 | "count = people.count\n", 705 | "special_data = [\n", 706 | " {'name': \"O'Brien\"},\n", 707 | " {'name': 'José'},\n", 708 | " {'name': '张伟'}\n", 709 | "]\n", 710 | "res = people.insert_all(special_data)\n", 711 | "assert people.count == count + 3" 712 | ] 713 | }, 714 | { 715 | "cell_type": "markdown", 716 | "id": "63b76213", 717 | "metadata": {}, 718 | "source": [ 719 | "Test error on invalid column" 720 | ] 721 | }, 722 | { 723 | "cell_type": "code", 724 | "execution_count": null, 725 | "id": "9d7d7991", 726 | "metadata": {}, 727 | "outputs": [], 728 | "source": [ 729 | "try:\n", 730 | " people.insert_all([{'name': 'Valid'}, {'invalid_col': 'Bad'}])\n", 731 | "except SQLError as e:\n", 732 | " assert 'no column named invalid_col' in str(e)" 733 | ] 734 | } 735 | ], 736 | "metadata": { 737 | "kernelspec": { 738 | "display_name": "python3", 739 | "language": "python", 740 | "name": "python3" 741 | } 742 | }, 743 | "nbformat": 4, 744 | "nbformat_minor": 5 745 | } 746 | -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-27-output-1.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | G 11 | 12 | 13 | 14 | Album 15 | 16 | 17 | Album 18 | 19 | 20 | AlbumId 🔑 21 | 22 | Title 23 | 24 | ArtistId 25 | 26 | 27 | 28 | 29 | Artist 30 | 31 | 32 | Artist 33 | 34 | 35 | ArtistId 🔑 36 | 37 | Name 38 | 39 | 40 | 41 | 42 | Album:ArtistId->Artist:ArtistId 43 | 44 | 45 | 46 | 47 | 48 | Customer 49 | 50 | 51 | Customer 52 | 53 | 54 | CustomerId 🔑 55 | 
56 | FirstName 57 | 58 | LastName 59 | 60 | Company 61 | 62 | Address 63 | 64 | City 65 | 66 | State 67 | 68 | Country 69 | 70 | PostalCode 71 | 72 | Phone 73 | 74 | Fax 75 | 76 | Email 77 | 78 | SupportRepId 79 | 80 | 81 | 82 | 83 | Employee 84 | 85 | 86 | Employee 87 | 88 | 89 | EmployeeId 🔑 90 | 91 | LastName 92 | 93 | FirstName 94 | 95 | Title 96 | 97 | ReportsTo 98 | 99 | BirthDate 100 | 101 | HireDate 102 | 103 | Address 104 | 105 | City 106 | 107 | State 108 | 109 | Country 110 | 111 | PostalCode 112 | 113 | Phone 114 | 115 | Fax 116 | 117 | Email 118 | 119 | 120 | 121 | 122 | Customer:SupportRepId->Employee:EmployeeId 123 | 124 | 125 | 126 | 127 | 128 | Employee:ReportsTo->Employee:EmployeeId 129 | 130 | 131 | 132 | 133 | 134 | Genre 135 | 136 | 137 | Genre 138 | 139 | 140 | GenreId 🔑 141 | 142 | Name 143 | 144 | 145 | 146 | 147 | Invoice 148 | 149 | 150 | Invoice 151 | 152 | 153 | InvoiceId 🔑 154 | 155 | CustomerId 156 | 157 | InvoiceDate 158 | 159 | BillingAddress 160 | 161 | BillingCity 162 | 163 | BillingState 164 | 165 | BillingCountry 166 | 167 | BillingPostalCode 168 | 169 | Total 170 | 171 | 172 | 173 | 174 | Invoice:CustomerId->Customer:CustomerId 175 | 176 | 177 | 178 | 179 | 180 | InvoiceLine 181 | 182 | 183 | InvoiceLine 184 | 185 | 186 | InvoiceLineId 🔑 187 | 188 | InvoiceId 189 | 190 | TrackId 191 | 192 | UnitPrice 193 | 194 | Quantity 195 | 196 | 197 | 198 | 199 | InvoiceLine:InvoiceId->Invoice:InvoiceId 200 | 201 | 202 | 203 | 204 | 205 | Track 206 | 207 | 208 | Track 209 | 210 | 211 | TrackId 🔑 212 | 213 | Name 214 | 215 | AlbumId 216 | 217 | MediaTypeId 218 | 219 | GenreId 220 | 221 | Composer 222 | 223 | Milliseconds 224 | 225 | Bytes 226 | 227 | UnitPrice 228 | 229 | 230 | 231 | 232 | InvoiceLine:TrackId->Track:TrackId 233 | 234 | 235 | 236 | 237 | 238 | MediaType 239 | 240 | 241 | MediaType 242 | 243 | 244 | MediaTypeId 🔑 245 | 246 | Name 247 | 248 | 249 | 250 | 251 | Playlist 252 | 253 | 254 | Playlist 255 | 256 | 257 | PlaylistId 🔑 
258 | 259 | Name 260 | 261 | 262 | 263 | 264 | PlaylistTrack 265 | 266 | 267 | PlaylistTrack 268 | 269 | 270 | PlaylistId 🔑 271 | 272 | 273 | TrackId 🔑 274 | 275 | 276 | 277 | 278 | PlaylistTrack:PlaylistId->Playlist:PlaylistId 279 | 280 | 281 | 282 | 283 | 284 | PlaylistTrack:TrackId->Track:TrackId 285 | 286 | 287 | 288 | 289 | 290 | Track:AlbumId->Album:AlbumId 291 | 292 | 293 | 294 | 295 | 296 | Track:GenreId->Genre:GenreId 297 | 298 | 299 | 300 | 301 | 302 | Track:MediaTypeId->MediaType:MediaTypeId 303 | 304 | 305 | 306 | 307 | 308 | -------------------------------------------------------------------------------- /index_files/figure-commonmark/cell-31-output-1.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | G 11 | 12 | 13 | 14 | Album 15 | 16 | 17 | Album 18 | 19 | 20 | AlbumId 🔑 21 | 22 | Title 23 | 24 | ArtistId 25 | 26 | 27 | 28 | 29 | Artist 30 | 31 | 32 | Artist 33 | 34 | 35 | ArtistId 🔑 36 | 37 | Name 38 | 39 | 40 | 41 | 42 | Album:ArtistId->Artist:ArtistId 43 | 44 | 45 | 46 | 47 | 48 | Customer 49 | 50 | 51 | Customer 52 | 53 | 54 | CustomerId 🔑 55 | 56 | FirstName 57 | 58 | LastName 59 | 60 | Company 61 | 62 | Address 63 | 64 | City 65 | 66 | State 67 | 68 | Country 69 | 70 | PostalCode 71 | 72 | Phone 73 | 74 | Fax 75 | 76 | Email 77 | 78 | SupportRepId 79 | 80 | 81 | 82 | 83 | Employee 84 | 85 | 86 | Employee 87 | 88 | 89 | EmployeeId 🔑 90 | 91 | LastName 92 | 93 | FirstName 94 | 95 | Title 96 | 97 | ReportsTo 98 | 99 | BirthDate 100 | 101 | HireDate 102 | 103 | Address 104 | 105 | City 106 | 107 | State 108 | 109 | Country 110 | 111 | PostalCode 112 | 113 | Phone 114 | 115 | Fax 116 | 117 | Email 118 | 119 | 120 | 121 | 122 | Customer:SupportRepId->Employee:EmployeeId 123 | 124 | 125 | 126 | 127 | 128 | Employee:ReportsTo->Employee:EmployeeId 129 | 130 | 131 | 132 | 133 | 134 | Genre 135 | 136 | 137 | Genre 138 | 139 | 140 | GenreId 🔑 141 | 142 | Name 143 | 144 | 145 | 146 | 147 | 
Invoice 148 | 149 | 150 | Invoice 151 | 152 | 153 | InvoiceId 🔑 154 | 155 | CustomerId 156 | 157 | InvoiceDate 158 | 159 | BillingAddress 160 | 161 | BillingCity 162 | 163 | BillingState 164 | 165 | BillingCountry 166 | 167 | BillingPostalCode 168 | 169 | Total 170 | 171 | 172 | 173 | 174 | Invoice:CustomerId->Customer:CustomerId 175 | 176 | 177 | 178 | 179 | 180 | InvoiceLine 181 | 182 | 183 | InvoiceLine 184 | 185 | 186 | InvoiceLineId 🔑 187 | 188 | InvoiceId 189 | 190 | TrackId 191 | 192 | UnitPrice 193 | 194 | Quantity 195 | 196 | 197 | 198 | 199 | InvoiceLine:InvoiceId->Invoice:InvoiceId 200 | 201 | 202 | 203 | 204 | 205 | Track 206 | 207 | 208 | Track 209 | 210 | 211 | TrackId 🔑 212 | 213 | Name 214 | 215 | AlbumId 216 | 217 | MediaTypeId 218 | 219 | GenreId 220 | 221 | Composer 222 | 223 | Milliseconds 224 | 225 | Bytes 226 | 227 | UnitPrice 228 | 229 | 230 | 231 | 232 | InvoiceLine:TrackId->Track:TrackId 233 | 234 | 235 | 236 | 237 | 238 | MediaType 239 | 240 | 241 | MediaType 242 | 243 | 244 | MediaTypeId 🔑 245 | 246 | Name 247 | 248 | 249 | 250 | 251 | Playlist 252 | 253 | 254 | Playlist 255 | 256 | 257 | PlaylistId 🔑 258 | 259 | Name 260 | 261 | 262 | 263 | 264 | PlaylistTrack 265 | 266 | 267 | PlaylistTrack 268 | 269 | 270 | PlaylistId 🔑 271 | 272 | 273 | TrackId 🔑 274 | 275 | 276 | 277 | 278 | PlaylistTrack:PlaylistId->Playlist:PlaylistId 279 | 280 | 281 | 282 | 283 | 284 | PlaylistTrack:TrackId->Track:TrackId 285 | 286 | 287 | 288 | 289 | 290 | Track:AlbumId->Album:AlbumId 291 | 292 | 293 | 294 | 295 | 296 | Track:GenreId->Genre:GenreId 297 | 298 | 299 | 300 | 301 | 302 | Track:MediaTypeId->MediaType:MediaTypeId 303 | 304 | 305 | 306 | 307 | 308 | --------------------------------------------------------------------------------