├── .flake8 ├── .github └── workflows │ └── ci-cd.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── datasette_ml ├── __init__.py └── py.typed ├── demo ├── generate.py ├── metadata.yml ├── samples.sql └── sqml.db ├── poetry.lock ├── pyproject.toml ├── renovate.json └── tests ├── __init__.py ├── conftest.py └── test_plugin.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | extend-ignore = E203,E501 -------------------------------------------------------------------------------- /.github/workflows/ci-cd.yml: -------------------------------------------------------------------------------- 1 | name: CI/CD 2 | 3 | on: 4 | push: 5 | pull_request: 6 | workflow_dispatch: 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: ["3.8", "3.9", "3.10", "3.11"] 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: ${{ matrix.python-version }} 20 | - name: Cache Python modules 21 | uses: actions/cache@v3 22 | with: 23 | path: ~/.cache/pip 24 | key: ${{ runner.os }}-pip-${{ hashFiles('**/poetry.lock') }} 25 | restore-keys: | 26 | ${{ runner.os }}-pip- 27 | - name: Install dependencies 28 | run: | 29 | python -m pip install --upgrade pip 30 | python -m pip install --upgrade poetry 31 | poetry install 32 | - name: Run linting 33 | run: | 34 | poetry run black --check datasette_ml tests 35 | poetry run flake8 datasette_ml tests 36 | poetry run mypy datasette_ml tests 37 | - name: Run tests 38 | run: | 39 | poetry run pytest -v --cov=datasette_ml --cov=tests --cov-branch --cov-report=term-missing tests 40 | poetry run coverage xml 41 | - name: Publish code coverage to Codecov 42 | uses: codecov/codecov-action@v3 43 | with: 44 | token: ${{ secrets.CODECOV_TOKEN }} 45 | file: ./coverage.xml 46 | 47 | publish-package-test: 48 | 
runs-on: ubuntu-latest 49 | needs: [test] 50 | if: contains(github.ref, 'main') 51 | steps: 52 | - uses: actions/checkout@v4 53 | - name: Set up Python 54 | uses: actions/setup-python@v5 55 | with: 56 | python-version: "3.11" 57 | - uses: actions/cache@v3 58 | name: Configure pip caching 59 | with: 60 | path: ~/.cache/pip 61 | key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/pyproject.toml') }} 62 | restore-keys: | 63 | ${{ runner.os }}-publish-pip- 64 | - name: Install dependencies 65 | run: | 66 | python -m pip install --upgrade pip 67 | python -m pip install --upgrade poetry 68 | poetry install 69 | - name: Build Python package 70 | run: poetry build 71 | - name: Publish Python package on PyPI 72 | env: 73 | POETRY_REPOSITORIES_TESTPYPI_URL: https://test.pypi.org/legacy/ 74 | POETRY_PYPI_TOKEN_TESTPYPI: ${{ secrets.PYPI_TOKEN_TEST }} 75 | POETRY_HTTP_BASIC_TESTPYPI_USERNAME: __token__ 76 | POETRY_HTTP_BASIC_TESTPYPI_PASSWORD: ${{ secrets.PYPI_TOKEN_TEST }} 77 | run: poetry publish -r testpypi --skip-existing 78 | 79 | publish-package: 80 | runs-on: ubuntu-latest 81 | needs: [test] 82 | if: contains(github.ref, 'tags') 83 | steps: 84 | - uses: actions/checkout@v4 85 | - name: Set up Python 86 | uses: actions/setup-python@v5 87 | with: 88 | python-version: "3.11" 89 | - uses: actions/cache@v3 90 | name: Configure pip caching 91 | with: 92 | path: ~/.cache/pip 93 | key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/pyproject.toml') }} 94 | restore-keys: | 95 | ${{ runner.os }}-publish-pip- 96 | - name: Install dependencies 97 | run: | 98 | python -m pip install --upgrade pip 99 | python -m pip install --upgrade poetry 100 | poetry install 101 | - name: Build Python package 102 | run: poetry build 103 | - name: Publish Python package on PyPI 104 | env: 105 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} 106 | POETRY_HTTP_BASIC_PYPI_USERNAME: __token__ 107 | POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_TOKEN }} 108 | run: poetry publish 109 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | coverage.json 47 | *.cover 48 | .hypothesis/ 49 | test-report.xml 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | media/ 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | dbs/ 67 | logs/ 68 | 69 | # Sphinx documentation 70 | docs/_build/ 71 | 72 | # PyBuilder 73 | target/ 74 | 75 | # Jupyter Notebook 76 | .ipynb_checkpoints 77 | 78 | # pyenv 79 | .python-version 80 | 81 | # celery beat schedule file 82 | celerybeat-schedule 83 | 84 | # SageMath parsed files 85 | *.sage.py 86 | 87 | # dotenv 88 | .env 89 | 90 | # virtualenv 91 | .venv 92 | venv/ 93 | ENV/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | 108 | # pytest 109 | .pytest_cache/ 110 | 111 | # Sphinx documentation 112 | docs/_build/ 113 | 114 | # OS 
generated files # 115 | .DS_Store 116 | .DS_Store? 117 | ._* 118 | .Spotlight-V100 119 | .Trashes 120 | ehthumbs.db 121 | Thumbs.db 122 | .sqlite3 123 | 124 | # IDEs and editors 125 | .idea/ 126 | .vscode/ 127 | *.swp 128 | .now 129 | 130 | # Docker 131 | .docker/ 132 | .vercel 133 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [Unreleased] 8 | 9 | ## [0.1.2] - 2023-05-26 10 | ### Fixed 11 | - RMSE metric for regression tasks with `sqlite-ml` 0.1.2 12 | 13 | ### Internal 14 | - Add Renovate configuration 15 | - Remove `importlib-metadata` dev dependency 16 | 17 | ## [0.1.1] - 2023-04-20 18 | ### Added 19 | - Missing `py.typed` file to distribute type information 20 | 21 | ### Changed 22 | - Replace `sqml` module with `sqlite-ml` dependency 23 | 24 | ## [0.1.0] - 2023-04-18 25 | ### Added 26 | - Initial release of `datasette-ml` 27 | 28 | [Unreleased]: https://github.com/rclement/datasette-ml/compare/0.1.2...HEAD 29 | [0.1.2]: https://github.com/rclement/datasette-ml/compare/0.1.1...0.1.2 30 | [0.1.1]: https://github.com/rclement/datasette-ml/compare/0.1.0...0.1.1 31 | [0.1.0]: https://github.com/rclement/datasette-ml/releases/tag/0.1.0 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Datasette ML 2 | 3 | > Bringing Machine Learning models near your data, not the other way around! 4 | 5 | Datasette ML is a [Datasette](https://datasette.io) plugin providing an MLOps 6 | platform to train, evaluate and make predictions from machine learning models. 7 | 8 | All the underlying features are provided by [`sqlite-ml`](https://github.com/rclement/sqlite-ml). 
9 | 10 | [![PyPI](https://img.shields.io/pypi/v/datasette-ml.svg)](https://pypi.org/project/datasette-ml/) 11 | [![CI/CD](https://github.com/rclement/datasette-ml/actions/workflows/ci-cd.yml/badge.svg)](https://github.com/rclement/datasette-ml/actions/workflows/ci-cd.yml) 12 | [![Coverage Status](https://img.shields.io/codecov/c/github/rclement/datasette-ml)](https://codecov.io/gh/rclement/datasette-ml) 13 | [![License](https://img.shields.io/github/license/rclement/datasette-ml)](https://github.com/rclement/datasette-ml/blob/master/LICENSE) 14 | 15 | 16 | 17 | **WARNING**: this plugin is still experimental and not ready for production. 18 | Some breaking changes might happen between releases before reaching a stable version. 19 | Use it at your own risks! 20 | 21 | 22 | 23 | ## Installation 24 | 25 | Install this plugin in the same environment as Datasette: 26 | 27 | ```bash 28 | $ datasette install datasette-ml 29 | ``` 30 | 31 | ## Usage 32 | 33 | Define configuration within `metadata.yml` / `metadata.json`: 34 | 35 | ```yaml 36 | plugins: 37 | datasette-ml: 38 | db: sqml 39 | ``` 40 | 41 | A new menu entry is now available, pointing at `/-/ml` to access the MLOps dashboard. 42 | 43 | ### Configuration properties 44 | 45 | | Property | Type | Description | 46 | | -------- | -------- | ----------------------------------------------- | 47 | | `db` | `string` | Database to store ML models (default is `sqml`) | 48 | 49 | ## Tutorial 50 | 51 | Using `datasette-ml` you can start training Machine Learning models directly 52 | along your data, simply by using custom SQL functions! Let's get started by 53 | training a classifier against the famous "Iris Dataset" to predict flower types. 54 | 55 | ### Loading the dataset 56 | 57 | First let's load our data. For a real world project, your data may live with its 58 | own table or being accessed through an SQL view. 
For the purpose of this tutorial, 59 | we can use the `sqml_load_dataset` function to load 60 | [standard Scikit-Learn datasets](https://scikit-learn.org/stable/modules/classes.html#module-sklearn.datasets): 61 | 62 | ```sql 63 | SELECT sqml_load_dataset('iris') AS dataset; 64 | ``` 65 | 66 | It will return the following data: 67 | 68 | | dataset | 69 | | --- | 70 | | {"table": "dataset_iris", "feature_names": ["sepal length (cm)", "sepal width (cm)", "petal length (cm)", "petal width (cm)"], "target_names": ["setosa", "versicolor", "virginica"], "size": 150} | 71 | 72 | The Iris dataset is loaded into a table nammed `dataset_iris`, 73 | containing 150 examples, 4 features and 3 classes to be predicted. 74 | 75 | ### Training a classifier 76 | 77 | Now that our dataset is ready, let's train a first machine learning model to 78 | perform a classification task using the `sqml_train` function: 79 | 80 | ```sql 81 | SELECT sqml_train( 82 | 'Iris prediction', 83 | 'classification', 84 | 'logistic_regression', 85 | 'dataset_iris', 86 | 'target' 87 | ) AS training; 88 | ``` 89 | 90 | It will return the following data: 91 | 92 | | training | 93 | | --- | 94 | | {"experiment_name": "Iris prediction", "prediction_type": "classification", "algorithm": "logistic_regression", "deployed": true, "score": 0.9473684210526315} | 95 | 96 | We have just trained our first machine learning model! The output data informs us 97 | that our model has been trained, yields a score of 0.94 and has been deployed. 98 | 99 | ### Performing predictions 100 | 101 | Now that we have trained our classifier, let's use it to make predictions! 
102 | 103 | Predict the target label for the first row of `dataset_iris` using the 104 | `sqml_predict` function: 105 | 106 | ```sql 107 | SELECT 108 | dataset_iris.*, 109 | sqml_predict( 110 | 'Iris prediction', 111 | json_object( 112 | 'sepal length (cm)', [sepal length (cm)], 113 | 'sepal width (cm)', [sepal width (cm)], 114 | 'petal length (cm)', [petal length (cm)], 115 | 'petal width (cm)', [petal width (cm)] 116 | ) 117 | ) AS prediction 118 | FROM dataset_iris 119 | LIMIT 1; 120 | ``` 121 | 122 | This will output the following data: 123 | 124 | | sepal length (cm) | sepal width (cm) | petal length (cm) | petal width (cm) | target | prediction | 125 | | --- | --- | --- | --- | --- | --- | 126 | | 5.1 | 3.5 | 1.4 | 0.2 | 0.0 | 0.0 | 127 | 128 | Yay! Our prediction is matching the target label! 129 | 130 | Let's see if we can find some predictions not matching the target label. 131 | To perform lots of predictions, we will use `sqml_predict_batch` which is more 132 | efficient than `sqml_predict`: 133 | 134 | ```sql 135 | SELECT 136 | dataset_iris.*, 137 | batch.value AS prediction, 138 | dataset_iris.target = batch.value AS match 139 | FROM 140 | dataset_iris 141 | JOIN json_each ( 142 | ( 143 | SELECT 144 | sqml_predict_batch( 145 | 'Iris prediction', 146 | json_group_array( 147 | json_object( 148 | 'sepal length (cm)', [sepal length (cm)], 149 | 'sepal width (cm)', [sepal width (cm)], 150 | 'petal length (cm)', [petal length (cm)], 151 | 'petal width (cm)', [petal width (cm)] 152 | ) 153 | ) 154 | ) 155 | FROM 156 | dataset_iris 157 | ) 158 | ) batch ON (batch.rowid + 1) = dataset_iris.rowid 159 | WHERE match = FALSE; 160 | ``` 161 | 162 | This will yield the following output data: 163 | 164 | | sepal length (cm) | sepal width (cm) | petal length (cm) | petal width (cm) | target | prediction | match | 165 | | --- | --- | --- | --- | --- | --- | --- | 166 | | 5.9 | 3.2 | 4.8 | 1.8 | 1.0 | 2.0 | 0 | 167 | | 6.7 | 3.0 | 5.0 | 1.7 | 1.0 | 2.0 | 0 | 168 | | 6.0 
| 2.7 | 5.1 | 1.6 | 1.0 | 2.0 | 0 | 169 | | 4.9 | 2.5 | 4.5 | 1.7 | 2.0 | 1.0 | 0 | 170 | 171 | Oh no! 4 predictions have not predicted the correct target label! 172 | 173 | Let's see if we can train a better algorithm to enhance the prediction quality. 174 | 175 | ### Training a new model 176 | 177 | Let's use a Support Vector Machine algorithm, usually yielding better results 178 | compared to the more simplistic Logistic Regression: 179 | 180 | ```sql 181 | SELECT sqml_train( 182 | 'Iris prediction', 183 | 'classification', 184 | 'svc', 185 | 'dataset_iris', 186 | 'target' 187 | ) AS training; 188 | ``` 189 | 190 | This will yield the following data: 191 | 192 | | training | 193 | | --- | 194 | | {"experiment_name": "Iris prediction", "prediction_type": "classification", "algorithm": "svc", "deployed": true, "score": 0.9736842105263158} | 195 | 196 | We can already see that the score of this new model is higher than the previous one and it has been deployed. 197 | 198 | Let's try our new classifier on the same dataset: 199 | 200 | ```sql 201 | SELECT 202 | dataset_iris.*, 203 | batch.value AS prediction, 204 | dataset_iris.target = batch.value AS match 205 | FROM 206 | dataset_iris 207 | JOIN json_each ( 208 | ( 209 | SELECT 210 | sqml_predict_batch( 211 | 'Iris prediction', 212 | json_group_array( 213 | json_object( 214 | 'sepal length (cm)', [sepal length (cm)], 215 | 'sepal width (cm)', [sepal width (cm)], 216 | 'petal length (cm)', [petal length (cm)], 217 | 'petal width (cm)', [petal width (cm)] 218 | ) 219 | ) 220 | ) 221 | FROM 222 | dataset_iris 223 | ) 224 | ) batch ON (batch.rowid + 1) = dataset_iris.rowid 225 | WHERE match = FALSE; 226 | ``` 227 | 228 | This will lead the following results: 229 | 230 | | sepal length (cm) | sepal width (cm) | petal length (cm) | petal width (cm) | target | prediction | match | 231 | | --- | --- | --- | --- | --- | --- | --- | 232 | | 5.9 | 3.2 | 4.8 | 1.8 | 1.0 | 2.0 | 0 | 233 | | 6.7 | 3.0 | 5.0 | 1.7 | 1.0 | 2.0 | 0 
Yay! We managed to predict one more target label with this new model!
this plugin locally, first checkout the code. 272 | Then create a new virtual environment and the required dependencies: 273 | 274 | ```bash 275 | poetry shell 276 | poetry install 277 | ``` 278 | 279 | To run the QA suite: 280 | 281 | ```bash 282 | black --check datasette_ml tests 283 | flake8 datasette_ml tests 284 | mypy datasette_ml tests 285 | pytest -v --cov=datasette_ml --cov=tests --cov-branch --cov-report=term-missing tests 286 | ``` 287 | 288 | ## Demo 289 | 290 | With the developmnent environment setup, you can run the demo locally: 291 | 292 | ```bash 293 | python demo/generate.py 294 | datasette --metadata demo/metadata.yml demo/sqml.db 295 | ``` 296 | 297 | ## Inspiration 298 | 299 | All the things on the internet that have been inspiring this project: 300 | 301 | - [PostgresML](https://postgresml.org) 302 | - [MLFlow](https://mlflow.org) 303 | - [SQLite Run-Time Loadable Extensions](https://www.sqlite.org/loadext.html) 304 | - [Alex Garcia's `sqlite-loadable-rs`](https://github.com/asg017/sqlite-loadable-rs) 305 | - [Alex Garcia's SQLite extensions](https://github.com/asg017) 306 | - [Alex Garcia, "Making SQLite extensions pip install-able"](https://observablehq.com/@asg017/making-sqlite-extensions-pip-install-able) 307 | - [Max Halford, "Online gradient descent written in SQL"](https://maxhalford.github.io/blog/ogd-in-sql/) 308 | - [Ricardo Anderegg, "Extending SQLite with Rust"](https://ricardoanderegg.com/posts/extending-sqlite-with-rust/) 309 | 310 | ## License 311 | 312 | Licensed under Apache License, Version 2.0 313 | 314 | Copyright (c) 2023 - present Romain Clement 315 | -------------------------------------------------------------------------------- /datasette_ml/__init__.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | import typing as t 3 | 4 | from datasette import hookimpl 5 | from datasette.database import Database 6 | 7 | from sqlite_ml.sqml import SQML 8 | 9 | 10 | if 
t.TYPE_CHECKING: # pragma: no cover 11 | from datasette.app import Datasette 12 | 13 | 14 | sqml = SQML() 15 | 16 | 17 | @hookimpl 18 | def startup(datasette: "Datasette") -> None: 19 | config = datasette.plugin_config("datasette-ml") or {} 20 | db_name = config.get("db", "sqml") 21 | db: Database = datasette.get_database(db_name) 22 | sqml.setup_schema(db.connect(True)) 23 | 24 | 25 | @hookimpl 26 | def prepare_connection( 27 | conn: sqlite3.Connection, database: str, datasette: "Datasette" 28 | ) -> None: 29 | sqml.register_functions(conn) 30 | -------------------------------------------------------------------------------- /datasette_ml/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rclement/datasette-ml/6f0077e3c877e58736d5574def6ea9f3a4a56ca2/datasette_ml/py.typed -------------------------------------------------------------------------------- /demo/generate.py: -------------------------------------------------------------------------------- 1 | import sqlite_utils 2 | 3 | from pathlib import Path 4 | from sqlite_ml.sqml import SQML 5 | 6 | 7 | def main() -> None: 8 | db_path = Path(__file__).parent / "sqml.db" 9 | db_path.unlink(missing_ok=True) 10 | db = sqlite_utils.Database(db_path) 11 | 12 | sqml = SQML() 13 | sqml.setup_schema(db.conn) 14 | sqml.register_functions(db.conn) 15 | 16 | samples_sql = (Path(__file__).parent / "samples.sql").read_text() 17 | db.executescript(samples_sql) 18 | 19 | 20 | if __name__ == "__main__": 21 | main() 22 | -------------------------------------------------------------------------------- /demo/metadata.yml: -------------------------------------------------------------------------------- 1 | plugins: 2 | datasette-ml: 3 | db: sqml 4 | -------------------------------------------------------------------------------- /demo/samples.sql: -------------------------------------------------------------------------------- 1 | -- load sample datasets 2 | 
SELECT sqml_load_dataset('iris'); 3 | SELECT sqml_load_dataset('digits'); 4 | SELECT sqml_load_dataset('wine'); 5 | SELECT sqml_load_dataset('breast_cancer'); 6 | SELECT sqml_load_dataset('diabetes'); 7 | 8 | -- train some models 9 | SELECT sqml_train('Iris prediction', 'classification', 'logistic_regression', 'dataset_iris', 'target'); 10 | SELECT sqml_train('Iris prediction', 'classification', 'svc', 'dataset_iris', 'target'); 11 | SELECT sqml_train('Digits prediction', 'classification', 'logistic_regression', 'dataset_digits', 'target'); 12 | SELECT sqml_train('Digits prediction', 'classification', 'svc', 'dataset_digits', 'target'); 13 | SELECT sqml_train('Wine prediction', 'classification', 'logistic_regression', 'dataset_wine', 'target'); 14 | SELECT sqml_train('Wine prediction', 'classification', 'svc', 'dataset_wine', 'target'); 15 | SELECT sqml_train('Breast cancer prediction', 'classification', 'logistic_regression', 'dataset_breast_cancer', 'target'); 16 | SELECT sqml_train('Breast cancer prediction', 'classification', 'svc', 'dataset_breast_cancer', 'target'); 17 | SELECT sqml_train('Diabetes prediction', 'regression', 'linear_regression', 'dataset_diabetes', 'target'); 18 | SELECT sqml_train('Diabetes prediction', 'regression', 'svr', 'dataset_diabetes', 'target'); 19 | -------------------------------------------------------------------------------- /demo/sqml.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rclement/datasette-ml/6f0077e3c877e58736d5574def6ea9f3a4a56ca2/demo/sqml.db -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "aiofiles" 5 | version = "23.1.0" 6 | description = "File support for asyncio." 
7 | category = "main" 8 | optional = false 9 | python-versions = ">=3.7,<4.0" 10 | files = [ 11 | {file = "aiofiles-23.1.0-py3-none-any.whl", hash = "sha256:9312414ae06472eb6f1d163f555e466a23aed1c8f60c30cccf7121dba2e53eb2"}, 12 | {file = "aiofiles-23.1.0.tar.gz", hash = "sha256:edd247df9a19e0db16534d4baaf536d6609a43e1de5401d7a4c1c148753a1635"}, 13 | ] 14 | 15 | [[package]] 16 | name = "anyio" 17 | version = "3.6.2" 18 | description = "High level compatibility layer for multiple asynchronous event loop implementations" 19 | category = "main" 20 | optional = false 21 | python-versions = ">=3.6.2" 22 | files = [ 23 | {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, 24 | {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, 25 | ] 26 | 27 | [package.dependencies] 28 | idna = ">=2.8" 29 | sniffio = ">=1.1" 30 | 31 | [package.extras] 32 | doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] 33 | test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] 34 | trio = ["trio (>=0.16,<0.22)"] 35 | 36 | [[package]] 37 | name = "asgi-csrf" 38 | version = "0.9" 39 | description = "ASGI middleware for protecting against CSRF attacks" 40 | category = "main" 41 | optional = false 42 | python-versions = "*" 43 | files = [ 44 | {file = "asgi-csrf-0.9.tar.gz", hash = "sha256:6e9d3bddaeac1a8fd33b188fe2abc8271f9085ab7be6e1a7f4d3c9df5d7f741a"}, 45 | {file = "asgi_csrf-0.9-py3-none-any.whl", hash = "sha256:e974cffb8a4ab84a28a0088acbf7a4ecc5be4a64f08dcbe19c60dea103da01c0"}, 46 | ] 47 | 48 | [package.dependencies] 49 | itsdangerous = "*" 50 | python-multipart = "*" 51 | 52 | [package.extras] 53 | test = ["asgi-lifespan", "httpx (>=0.16)", "pytest", "pytest-asyncio", "pytest-cov", "starlette"] 54 | 55 | [[package]] 56 | 
name = "asgiref" 57 | version = "3.6.0" 58 | description = "ASGI specs, helper code, and adapters" 59 | category = "main" 60 | optional = false 61 | python-versions = ">=3.7" 62 | files = [ 63 | {file = "asgiref-3.6.0-py3-none-any.whl", hash = "sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac"}, 64 | {file = "asgiref-3.6.0.tar.gz", hash = "sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506"}, 65 | ] 66 | 67 | [package.extras] 68 | tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] 69 | 70 | [[package]] 71 | name = "black" 72 | version = "23.12.1" 73 | description = "The uncompromising code formatter." 74 | category = "dev" 75 | optional = false 76 | python-versions = ">=3.8" 77 | files = [ 78 | {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, 79 | {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, 80 | {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, 81 | {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, 82 | {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, 83 | {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, 84 | {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, 85 | {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, 86 | {file = 
"black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, 87 | {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, 88 | {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, 89 | {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, 90 | {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, 91 | {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, 92 | {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, 93 | {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, 94 | {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, 95 | {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, 96 | {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, 97 | {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, 98 | {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, 99 | {file = "black-23.12.1.tar.gz", hash = 
"sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, 100 | ] 101 | 102 | [package.dependencies] 103 | click = ">=8.0.0" 104 | mypy-extensions = ">=0.4.3" 105 | packaging = ">=22.0" 106 | pathspec = ">=0.9.0" 107 | platformdirs = ">=2" 108 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 109 | typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} 110 | 111 | [package.extras] 112 | colorama = ["colorama (>=0.4.3)"] 113 | d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] 114 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 115 | uvloop = ["uvloop (>=0.15.2)"] 116 | 117 | [[package]] 118 | name = "certifi" 119 | version = "2022.12.7" 120 | description = "Python package for providing Mozilla's CA Bundle." 121 | category = "main" 122 | optional = false 123 | python-versions = ">=3.6" 124 | files = [ 125 | {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, 126 | {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, 127 | ] 128 | 129 | [[package]] 130 | name = "click" 131 | version = "8.1.3" 132 | description = "Composable command line interface toolkit" 133 | category = "main" 134 | optional = false 135 | python-versions = ">=3.7" 136 | files = [ 137 | {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, 138 | {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, 139 | ] 140 | 141 | [package.dependencies] 142 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 143 | 144 | [[package]] 145 | name = "click-default-group" 146 | version = "1.2.4" 147 | description = "click_default_group" 148 | category = "dev" 149 | optional = false 150 | python-versions = ">=2.7" 151 | files = [ 152 | {file = 
"click_default_group-1.2.4-py2.py3-none-any.whl", hash = "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f"}, 153 | {file = "click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e"}, 154 | ] 155 | 156 | [package.dependencies] 157 | click = "*" 158 | 159 | [package.extras] 160 | test = ["pytest"] 161 | 162 | [[package]] 163 | name = "click-default-group-wheel" 164 | version = "1.2.2" 165 | description = "Extends click.Group to invoke a command without explicit subcommand name (packaged as a wheel)" 166 | category = "main" 167 | optional = false 168 | python-versions = "*" 169 | files = [ 170 | {file = "click-default-group-wheel-1.2.2.tar.gz", hash = "sha256:e90da42d92c03e88a12ed0c0b69c8a29afb5d36e3dc8d29c423ba4219e6d7747"}, 171 | {file = "click_default_group_wheel-1.2.2-py3-none-any.whl", hash = "sha256:1599b0b6e0ff63ee806c2cb76593cb8cc73e723cd53532c92bc496dc6fc90e5c"}, 172 | ] 173 | 174 | [package.dependencies] 175 | click = "*" 176 | 177 | [[package]] 178 | name = "colorama" 179 | version = "0.4.6" 180 | description = "Cross-platform colored terminal text." 
181 | category = "main" 182 | optional = false 183 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 184 | files = [ 185 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 186 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 187 | ] 188 | 189 | [[package]] 190 | name = "coverage" 191 | version = "7.2.5" 192 | description = "Code coverage measurement for Python" 193 | category = "dev" 194 | optional = false 195 | python-versions = ">=3.7" 196 | files = [ 197 | {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, 198 | {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, 199 | {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, 200 | {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, 201 | {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, 202 | {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, 203 | {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, 204 | {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, 205 | 
{file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, 206 | {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, 207 | {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, 208 | {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, 209 | {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, 210 | {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, 211 | {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, 212 | {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, 213 | {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, 214 | {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, 215 | {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, 216 | {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, 217 | {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, 218 | {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, 219 | {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, 220 | {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, 221 | {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, 222 | {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, 223 | {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, 224 | {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, 225 | {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, 226 | {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, 227 | {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, 228 | {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, 229 | {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, 230 | {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, 231 | {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, 232 | {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, 233 | {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, 234 | {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, 235 | {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, 236 | {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, 237 | {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, 238 | {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, 239 | {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, 240 | {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, 241 | {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, 242 | {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, 243 | {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, 244 | {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, 245 | {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, 246 | {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, 247 | {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, 248 | ] 249 | 250 | [package.dependencies] 251 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} 252 | 253 | [package.extras] 254 | toml = ["tomli"] 255 | 256 | [[package]] 257 | name = "datasette" 258 | version = "0.64.3" 259 | description = "An open source multi-tool for exploring and publishing data" 260 | category = "main" 261 | optional = false 262 | python-versions = ">=3.7" 263 | files = [ 264 | {file = "datasette-0.64.3-py3-none-any.whl", hash = "sha256:5d1e2272c9b5321f4fe158da76c81897dc34ea8daef9633b7232437782026096"}, 265 | {file = "datasette-0.64.3.tar.gz", hash = "sha256:12ae15cd680d87f76a45ad30ff5b28a1fbf482e480dffdcfcea48be58b7a7c11"}, 266 | ] 267 | 268 | [package.dependencies] 269 | aiofiles = ">=0.4" 270 | asgi-csrf = ">=0.9" 271 | asgiref = ">=3.2.10" 272 | click = ">=7.1.1" 273 | click-default-group-wheel = ">=1.2.2" 274 | httpx = ">=0.20" 275 | hupper = ">=1.9" 276 | itsdangerous = ">=1.1" 277 | janus = ">=0.6.2" 278 | Jinja2 = ">=2.10.3" 279 | mergedeep = ">=1.1.1" 280 | pint = ">=0.9" 
281 | pip = "*" 282 | pluggy = ">=1.0" 283 | PyYAML = ">=5.3" 284 | setuptools = "*" 285 | uvicorn = ">=0.11" 286 | 287 | [package.extras] 288 | docs = ["blacken-docs", "codespell", "furo (==2022.9.29)", "sphinx-autobuild", "sphinx-copybutton"] 289 | rich = ["rich"] 290 | test = ["beautifulsoup4 (>=4.8.1)", "black (==22.10.0)", "blacken-docs (==1.12.1)", "cogapp (>=3.3.0)", "pytest (>=5.2.2)", "pytest-asyncio (>=0.17)", "pytest-timeout (>=1.4.2)", "pytest-xdist (>=2.2.1)", "trustme (>=0.7)"] 291 | 292 | [[package]] 293 | name = "exceptiongroup" 294 | version = "1.1.1" 295 | description = "Backport of PEP 654 (exception groups)" 296 | category = "dev" 297 | optional = false 298 | python-versions = ">=3.7" 299 | files = [ 300 | {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, 301 | {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, 302 | ] 303 | 304 | [package.extras] 305 | test = ["pytest (>=6)"] 306 | 307 | [[package]] 308 | name = "faker" 309 | version = "19.13.0" 310 | description = "Faker is a Python package that generates fake data for you." 
311 | category = "dev" 312 | optional = false 313 | python-versions = ">=3.8" 314 | files = [ 315 | {file = "Faker-19.13.0-py3-none-any.whl", hash = "sha256:da880a76322db7a879c848a0771e129338e0a680a9f695fd9a3e7a6ac82b45e1"}, 316 | {file = "Faker-19.13.0.tar.gz", hash = "sha256:14ccb0aec342d33aa3889a864a56e5b3c2d56bce1b89f9189f4fbc128b9afc1e"}, 317 | ] 318 | 319 | [package.dependencies] 320 | python-dateutil = ">=2.4" 321 | typing-extensions = {version = ">=3.10.0.1", markers = "python_version <= \"3.8\""} 322 | 323 | [[package]] 324 | name = "flake8" 325 | version = "6.1.0" 326 | description = "the modular source code checker: pep8 pyflakes and co" 327 | category = "dev" 328 | optional = false 329 | python-versions = ">=3.8.1" 330 | files = [ 331 | {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, 332 | {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, 333 | ] 334 | 335 | [package.dependencies] 336 | mccabe = ">=0.7.0,<0.8.0" 337 | pycodestyle = ">=2.11.0,<2.12.0" 338 | pyflakes = ">=3.1.0,<3.2.0" 339 | 340 | [[package]] 341 | name = "h11" 342 | version = "0.14.0" 343 | description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" 344 | category = "main" 345 | optional = false 346 | python-versions = ">=3.7" 347 | files = [ 348 | {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, 349 | {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, 350 | ] 351 | 352 | [[package]] 353 | name = "httpcore" 354 | version = "0.17.0" 355 | description = "A minimal low-level HTTP client." 
356 | category = "main" 357 | optional = false 358 | python-versions = ">=3.7" 359 | files = [ 360 | {file = "httpcore-0.17.0-py3-none-any.whl", hash = "sha256:0fdfea45e94f0c9fd96eab9286077f9ff788dd186635ae61b312693e4d943599"}, 361 | {file = "httpcore-0.17.0.tar.gz", hash = "sha256:cc045a3241afbf60ce056202301b4d8b6af08845e3294055eb26b09913ef903c"}, 362 | ] 363 | 364 | [package.dependencies] 365 | anyio = ">=3.0,<5.0" 366 | certifi = "*" 367 | h11 = ">=0.13,<0.15" 368 | sniffio = ">=1.0.0,<2.0.0" 369 | 370 | [package.extras] 371 | http2 = ["h2 (>=3,<5)"] 372 | socks = ["socksio (>=1.0.0,<2.0.0)"] 373 | 374 | [[package]] 375 | name = "httpx" 376 | version = "0.24.0" 377 | description = "The next generation HTTP client." 378 | category = "main" 379 | optional = false 380 | python-versions = ">=3.7" 381 | files = [ 382 | {file = "httpx-0.24.0-py3-none-any.whl", hash = "sha256:447556b50c1921c351ea54b4fe79d91b724ed2b027462ab9a329465d147d5a4e"}, 383 | {file = "httpx-0.24.0.tar.gz", hash = "sha256:507d676fc3e26110d41df7d35ebd8b3b8585052450f4097401c9be59d928c63e"}, 384 | ] 385 | 386 | [package.dependencies] 387 | certifi = "*" 388 | httpcore = ">=0.15.0,<0.18.0" 389 | idna = "*" 390 | sniffio = "*" 391 | 392 | [package.extras] 393 | brotli = ["brotli", "brotlicffi"] 394 | cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] 395 | http2 = ["h2 (>=3,<5)"] 396 | socks = ["socksio (>=1.0.0,<2.0.0)"] 397 | 398 | [[package]] 399 | name = "hupper" 400 | version = "1.12" 401 | description = "Integrated process monitor for developing and reloading daemons." 
402 | category = "main" 403 | optional = false 404 | python-versions = ">=3.7" 405 | files = [ 406 | {file = "hupper-1.12-py3-none-any.whl", hash = "sha256:b8bc41bb75939e816f30f118026d0ba99544af4d6992583df3b4813765af27ef"}, 407 | {file = "hupper-1.12.tar.gz", hash = "sha256:18b1653d9832c9f8e7d3401986c7e7af2ae6783616be0bc406bfe0b14134a5c6"}, 408 | ] 409 | 410 | [package.extras] 411 | docs = ["Sphinx", "pylons-sphinx-themes", "setuptools", "watchdog"] 412 | testing = ["mock", "pytest", "pytest-cov", "watchdog"] 413 | 414 | [[package]] 415 | name = "idna" 416 | version = "3.4" 417 | description = "Internationalized Domain Names in Applications (IDNA)" 418 | category = "main" 419 | optional = false 420 | python-versions = ">=3.5" 421 | files = [ 422 | {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, 423 | {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, 424 | ] 425 | 426 | [[package]] 427 | name = "iniconfig" 428 | version = "2.0.0" 429 | description = "brain-dead simple config-ini parsing" 430 | category = "dev" 431 | optional = false 432 | python-versions = ">=3.7" 433 | files = [ 434 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 435 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 436 | ] 437 | 438 | [[package]] 439 | name = "itsdangerous" 440 | version = "2.1.2" 441 | description = "Safely pass data to untrusted environments and back." 
442 | category = "main" 443 | optional = false 444 | python-versions = ">=3.7" 445 | files = [ 446 | {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, 447 | {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, 448 | ] 449 | 450 | [[package]] 451 | name = "janus" 452 | version = "1.0.0" 453 | description = "Mixed sync-async queue to interoperate between asyncio tasks and classic threads" 454 | category = "main" 455 | optional = false 456 | python-versions = ">=3.7" 457 | files = [ 458 | {file = "janus-1.0.0-py3-none-any.whl", hash = "sha256:2596ea5482711c1ee3ef2df6c290aaf370a13c55a007826e8f7c32d696d1d00a"}, 459 | {file = "janus-1.0.0.tar.gz", hash = "sha256:df976f2cdcfb034b147a2d51edfc34ff6bfb12d4e2643d3ad0e10de058cb1612"}, 460 | ] 461 | 462 | [package.dependencies] 463 | typing-extensions = ">=3.7.4.3" 464 | 465 | [[package]] 466 | name = "jinja2" 467 | version = "3.1.2" 468 | description = "A very fast and expressive template engine." 
469 | category = "main" 470 | optional = false 471 | python-versions = ">=3.7" 472 | files = [ 473 | {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, 474 | {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, 475 | ] 476 | 477 | [package.dependencies] 478 | MarkupSafe = ">=2.0" 479 | 480 | [package.extras] 481 | i18n = ["Babel (>=2.7)"] 482 | 483 | [[package]] 484 | name = "joblib" 485 | version = "1.2.0" 486 | description = "Lightweight pipelining with Python functions" 487 | category = "main" 488 | optional = false 489 | python-versions = ">=3.7" 490 | files = [ 491 | {file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"}, 492 | {file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"}, 493 | ] 494 | 495 | [[package]] 496 | name = "markupsafe" 497 | version = "2.1.2" 498 | description = "Safely add untrusted strings to HTML/XML markup." 
499 | category = "main" 500 | optional = false 501 | python-versions = ">=3.7" 502 | files = [ 503 | {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, 504 | {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, 505 | {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, 506 | {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, 507 | {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, 508 | {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, 509 | {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, 510 | {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, 511 | {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, 512 | {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, 513 | {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, 514 | {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, 515 | {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, 516 | {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, 517 | {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, 518 | {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, 519 | {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, 520 | {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, 521 | {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, 522 | {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, 523 | {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, 524 | {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, 525 | {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, 526 | {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, 527 | {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, 528 | {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, 529 | {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, 530 | {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, 531 | {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, 532 | {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, 533 | {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, 534 | {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, 535 | {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, 536 | {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, 537 | {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, 538 | {file = 
"MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, 539 | {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, 540 | {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, 541 | {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, 542 | {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, 543 | {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, 544 | {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, 545 | {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, 546 | {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, 547 | {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, 548 | {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, 549 | {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, 550 | {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = 
"sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, 551 | {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, 552 | {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, 553 | ] 554 | 555 | [[package]] 556 | name = "mccabe" 557 | version = "0.7.0" 558 | description = "McCabe checker, plugin for flake8" 559 | category = "dev" 560 | optional = false 561 | python-versions = ">=3.6" 562 | files = [ 563 | {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, 564 | {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, 565 | ] 566 | 567 | [[package]] 568 | name = "mergedeep" 569 | version = "1.3.4" 570 | description = "A deep merge function for 🐍." 571 | category = "main" 572 | optional = false 573 | python-versions = ">=3.6" 574 | files = [ 575 | {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, 576 | {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, 577 | ] 578 | 579 | [[package]] 580 | name = "mypy" 581 | version = "1.8.0" 582 | description = "Optional static typing for Python" 583 | category = "dev" 584 | optional = false 585 | python-versions = ">=3.8" 586 | files = [ 587 | {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, 588 | {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, 589 | {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, 590 | 
{file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, 591 | {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, 592 | {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, 593 | {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, 594 | {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, 595 | {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, 596 | {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, 597 | {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, 598 | {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, 599 | {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, 600 | {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, 601 | {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, 602 | {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, 603 | {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, 604 | {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, 605 | {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, 606 | {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, 607 | {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, 608 | {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, 609 | {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, 610 | {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, 611 | {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, 612 | {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, 613 | {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, 614 | ] 615 | 616 | [package.dependencies] 617 | mypy-extensions = ">=1.0.0" 618 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 619 | typing-extensions = ">=4.1.0" 620 | 621 | [package.extras] 622 | dmypy = ["psutil (>=4.0)"] 623 | install-types = ["pip"] 624 | mypyc = ["setuptools (>=50)"] 625 | reports = ["lxml"] 626 | 627 | [[package]] 628 | name = "mypy-extensions" 629 | version = "1.0.0" 630 | description = "Type system extensions for programs checked with 
the mypy type checker." 631 | category = "dev" 632 | optional = false 633 | python-versions = ">=3.5" 634 | files = [ 635 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, 636 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, 637 | ] 638 | 639 | [[package]] 640 | name = "numpy" 641 | version = "1.24.3" 642 | description = "Fundamental package for array computing in Python" 643 | category = "main" 644 | optional = false 645 | python-versions = ">=3.8" 646 | files = [ 647 | {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, 648 | {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, 649 | {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, 650 | {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, 651 | {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, 652 | {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, 653 | {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, 654 | {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, 655 | {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, 656 | {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, 657 | {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, 658 | {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, 659 | {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, 660 | {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, 661 | {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, 662 | {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, 663 | {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, 664 | {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, 665 | {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, 666 | {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, 667 | {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, 668 | {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, 669 | {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, 670 | {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, 671 | {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, 672 | {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, 673 | {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, 674 | {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, 675 | ] 676 | 677 | [[package]] 678 | name = "packaging" 679 | version = "23.1" 680 | description = "Core utilities for Python packages" 681 | category = "dev" 682 | optional = false 683 | python-versions = ">=3.7" 684 | files = [ 685 | {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, 686 | {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, 687 | ] 688 | 689 | [[package]] 690 | name = "pandas" 691 | version = "2.0.1" 692 | description = "Powerful data structures for data analysis, time series, and statistics" 693 | category = "main" 694 | optional = false 695 | python-versions = ">=3.8" 696 | files = [ 697 | {file = "pandas-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70a996a1d2432dadedbb638fe7d921c88b0cc4dd90374eab51bb33dc6c0c2a12"}, 698 | {file = "pandas-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:909a72b52175590debbf1d0c9e3e6bce2f1833c80c76d80bd1aa09188be768e5"}, 699 | {file 
= "pandas-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe7914d8ddb2d54b900cec264c090b88d141a1eed605c9539a187dbc2547f022"}, 700 | {file = "pandas-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a514ae436b23a92366fbad8365807fc0eed15ca219690b3445dcfa33597a5cc"}, 701 | {file = "pandas-2.0.1-cp310-cp310-win32.whl", hash = "sha256:12bd6618e3cc737c5200ecabbbb5eaba8ab645a4b0db508ceeb4004bb10b060e"}, 702 | {file = "pandas-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:2b6fe5f7ce1cba0e74188c8473c9091ead9b293ef0a6794939f8cc7947057abd"}, 703 | {file = "pandas-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:00959a04a1d7bbc63d75a768540fb20ecc9e65fd80744c930e23768345a362a7"}, 704 | {file = "pandas-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af2449e9e984dfad39276b885271ba31c5e0204ffd9f21f287a245980b0e4091"}, 705 | {file = "pandas-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910df06feaf9935d05247db6de452f6d59820e432c18a2919a92ffcd98f8f79b"}, 706 | {file = "pandas-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa0067f2419f933101bdc6001bcea1d50812afbd367b30943417d67fbb99678"}, 707 | {file = "pandas-2.0.1-cp311-cp311-win32.whl", hash = "sha256:7b8395d335b08bc8b050590da264f94a439b4770ff16bb51798527f1dd840388"}, 708 | {file = "pandas-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:8db5a644d184a38e6ed40feeb12d410d7fcc36648443defe4707022da127fc35"}, 709 | {file = "pandas-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7bbf173d364130334e0159a9a034f573e8b44a05320995127cf676b85fd8ce86"}, 710 | {file = "pandas-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c0853d487b6c868bf107a4b270a823746175b1932093b537b9b76c639fc6f7e"}, 711 | {file = "pandas-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25e23a03f7ad7211ffa30cb181c3e5f6d96a8e4cb22898af462a7333f8a74eb"}, 712 | {file = 
"pandas-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e09a53a4fe8d6ae2149959a2d02e1ef2f4d2ceb285ac48f74b79798507e468b4"}, 713 | {file = "pandas-2.0.1-cp38-cp38-win32.whl", hash = "sha256:a2564629b3a47b6aa303e024e3d84e850d36746f7e804347f64229f8c87416ea"}, 714 | {file = "pandas-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:03e677c6bc9cfb7f93a8b617d44f6091613a5671ef2944818469be7b42114a00"}, 715 | {file = "pandas-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d099ecaa5b9e977b55cd43cf842ec13b14afa1cfa51b7e1179d90b38c53ce6a"}, 716 | {file = "pandas-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a37ee35a3eb6ce523b2c064af6286c45ea1c7ff882d46e10d0945dbda7572753"}, 717 | {file = "pandas-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:320b180d125c3842c5da5889183b9a43da4ebba375ab2ef938f57bf267a3c684"}, 718 | {file = "pandas-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18d22cb9043b6c6804529810f492ab09d638ddf625c5dea8529239607295cb59"}, 719 | {file = "pandas-2.0.1-cp39-cp39-win32.whl", hash = "sha256:90d1d365d77d287063c5e339f49b27bd99ef06d10a8843cf00b1a49326d492c1"}, 720 | {file = "pandas-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:99f7192d8b0e6daf8e0d0fd93baa40056684e4b4aaaef9ea78dff34168e1f2f0"}, 721 | {file = "pandas-2.0.1.tar.gz", hash = "sha256:19b8e5270da32b41ebf12f0e7165efa7024492e9513fb46fb631c5022ae5709d"}, 722 | ] 723 | 724 | [package.dependencies] 725 | numpy = [ 726 | {version = ">=1.20.3", markers = "python_version < \"3.10\""}, 727 | {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, 728 | {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, 729 | ] 730 | python-dateutil = ">=2.8.2" 731 | pytz = ">=2020.1" 732 | tzdata = ">=2022.1" 733 | 734 | [package.extras] 735 | all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", 
"fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] 736 | aws = ["s3fs (>=2021.08.0)"] 737 | clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] 738 | compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] 739 | computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] 740 | excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] 741 | feather = ["pyarrow (>=7.0.0)"] 742 | fss = ["fsspec (>=2021.07.0)"] 743 | gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] 744 | hdf5 = ["tables (>=3.6.1)"] 745 | html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] 746 | mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] 747 | output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] 748 | parquet = ["pyarrow (>=7.0.0)"] 749 | performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] 750 | plot = ["matplotlib (>=3.6.1)"] 751 | postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] 752 | spss = ["pyreadstat (>=1.1.2)"] 753 | sql-other = ["SQLAlchemy (>=1.4.16)"] 754 | test = ["hypothesis (>=6.34.2)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] 755 | xml = ["lxml (>=4.6.3)"] 756 | 757 | [[package]] 758 | name = "pathspec" 759 | version = "0.11.1" 760 | description = "Utility library for gitignore style pattern 
matching of file paths." 761 | category = "dev" 762 | optional = false 763 | python-versions = ">=3.7" 764 | files = [ 765 | {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, 766 | {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, 767 | ] 768 | 769 | [[package]] 770 | name = "pint" 771 | version = "0.21" 772 | description = "Physical quantities module" 773 | category = "main" 774 | optional = false 775 | python-versions = ">=3.8" 776 | files = [ 777 | {file = "Pint-0.21-py3-none-any.whl", hash = "sha256:998b695e84a34d11702da4a8b9457a39bb5c7ab5ec68db90e948e30878e421f1"}, 778 | {file = "Pint-0.21.tar.gz", hash = "sha256:3e98bdf01f4dcf840cc0207c0b6f7510d4e0c6288efc1bf470626e875c831172"}, 779 | ] 780 | 781 | [package.extras] 782 | babel = ["babel (<=2.8)"] 783 | dask = ["dask"] 784 | mip = ["mip (>=1.13)"] 785 | numpy = ["numpy (>=1.19.5)"] 786 | pandas = ["pint-pandas (>=0.3)"] 787 | test = ["pytest", "pytest-cov", "pytest-mpl", "pytest-subtests"] 788 | uncertainties = ["uncertainties (>=3.1.6)"] 789 | xarray = ["xarray"] 790 | 791 | [[package]] 792 | name = "pip" 793 | version = "23.1.2" 794 | description = "The PyPA recommended tool for installing Python packages." 795 | category = "main" 796 | optional = false 797 | python-versions = ">=3.7" 798 | files = [ 799 | {file = "pip-23.1.2-py3-none-any.whl", hash = "sha256:3ef6ac33239e4027d9a5598a381b9d30880a1477e50039db2eac6e8a8f6d1b18"}, 800 | {file = "pip-23.1.2.tar.gz", hash = "sha256:0e7c86f486935893c708287b30bd050a36ac827ec7fe5e43fe7cb198dd835fba"}, 801 | ] 802 | 803 | [[package]] 804 | name = "platformdirs" 805 | version = "3.5.0" 806 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
807 | category = "dev" 808 | optional = false 809 | python-versions = ">=3.7" 810 | files = [ 811 | {file = "platformdirs-3.5.0-py3-none-any.whl", hash = "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4"}, 812 | {file = "platformdirs-3.5.0.tar.gz", hash = "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335"}, 813 | ] 814 | 815 | [package.extras] 816 | docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] 817 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] 818 | 819 | [[package]] 820 | name = "pluggy" 821 | version = "1.0.0" 822 | description = "plugin and hook calling mechanisms for python" 823 | category = "main" 824 | optional = false 825 | python-versions = ">=3.6" 826 | files = [ 827 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 828 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 829 | ] 830 | 831 | [package.extras] 832 | dev = ["pre-commit", "tox"] 833 | testing = ["pytest", "pytest-benchmark"] 834 | 835 | [[package]] 836 | name = "pycodestyle" 837 | version = "2.11.0" 838 | description = "Python style guide checker" 839 | category = "dev" 840 | optional = false 841 | python-versions = ">=3.8" 842 | files = [ 843 | {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, 844 | {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, 845 | ] 846 | 847 | [[package]] 848 | name = "pyflakes" 849 | version = "3.1.0" 850 | description = "passive checker of Python programs" 851 | category = "dev" 852 | optional = false 853 | python-versions = ">=3.8" 854 | files = [ 855 | {file = 
"pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, 856 | {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, 857 | ] 858 | 859 | [[package]] 860 | name = "pytest" 861 | version = "7.4.3" 862 | description = "pytest: simple powerful testing with Python" 863 | category = "dev" 864 | optional = false 865 | python-versions = ">=3.7" 866 | files = [ 867 | {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, 868 | {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, 869 | ] 870 | 871 | [package.dependencies] 872 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 873 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 874 | iniconfig = "*" 875 | packaging = "*" 876 | pluggy = ">=0.12,<2.0" 877 | tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} 878 | 879 | [package.extras] 880 | testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 881 | 882 | [[package]] 883 | name = "pytest-asyncio" 884 | version = "0.23.2" 885 | description = "Pytest support for asyncio" 886 | category = "dev" 887 | optional = false 888 | python-versions = ">=3.8" 889 | files = [ 890 | {file = "pytest-asyncio-0.23.2.tar.gz", hash = "sha256:c16052382554c7b22d48782ab3438d5b10f8cf7a4bdcae7f0f67f097d95beecc"}, 891 | {file = "pytest_asyncio-0.23.2-py3-none-any.whl", hash = "sha256:ea9021364e32d58f0be43b91c6233fb8d2224ccef2398d6837559e587682808f"}, 892 | ] 893 | 894 | [package.dependencies] 895 | pytest = ">=7.0.0" 896 | 897 | [package.extras] 898 | docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] 899 | testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] 900 | 901 | [[package]] 902 
| name = "pytest-cov" 903 | version = "4.1.0" 904 | description = "Pytest plugin for measuring coverage." 905 | category = "dev" 906 | optional = false 907 | python-versions = ">=3.7" 908 | files = [ 909 | {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, 910 | {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, 911 | ] 912 | 913 | [package.dependencies] 914 | coverage = {version = ">=5.2.1", extras = ["toml"]} 915 | pytest = ">=4.6" 916 | 917 | [package.extras] 918 | testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] 919 | 920 | [[package]] 921 | name = "python-dateutil" 922 | version = "2.8.2" 923 | description = "Extensions to the standard Python datetime module" 924 | category = "main" 925 | optional = false 926 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 927 | files = [ 928 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 929 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 930 | ] 931 | 932 | [package.dependencies] 933 | six = ">=1.5" 934 | 935 | [[package]] 936 | name = "python-multipart" 937 | version = "0.0.6" 938 | description = "A streaming multipart parser for Python" 939 | category = "main" 940 | optional = false 941 | python-versions = ">=3.7" 942 | files = [ 943 | {file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"}, 944 | {file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"}, 945 | ] 946 | 947 | [package.extras] 948 | dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr 
(==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] 949 | 950 | [[package]] 951 | name = "pytz" 952 | version = "2023.3" 953 | description = "World timezone definitions, modern and historical" 954 | category = "main" 955 | optional = false 956 | python-versions = "*" 957 | files = [ 958 | {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, 959 | {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, 960 | ] 961 | 962 | [[package]] 963 | name = "pyyaml" 964 | version = "6.0" 965 | description = "YAML parser and emitter for Python" 966 | category = "main" 967 | optional = false 968 | python-versions = ">=3.6" 969 | files = [ 970 | {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, 971 | {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, 972 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, 973 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, 974 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, 975 | {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, 976 | {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, 977 | {file = 
"PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, 978 | {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, 979 | {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, 980 | {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, 981 | {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, 982 | {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, 983 | {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, 984 | {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, 985 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, 986 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, 987 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, 988 | {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, 989 | {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, 990 | {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, 991 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, 992 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, 993 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, 994 | {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, 995 | {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, 996 | {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, 997 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, 998 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, 999 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, 1000 | {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, 1001 | {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, 1002 | 
{file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, 1003 | {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, 1004 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, 1005 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, 1006 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, 1007 | {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, 1008 | {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, 1009 | {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, 1010 | ] 1011 | 1012 | [[package]] 1013 | name = "scikit-learn" 1014 | version = "1.2.2" 1015 | description = "A set of python modules for machine learning and data mining" 1016 | category = "main" 1017 | optional = false 1018 | python-versions = ">=3.8" 1019 | files = [ 1020 | {file = "scikit-learn-1.2.2.tar.gz", hash = "sha256:8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7"}, 1021 | {file = "scikit_learn-1.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99cc01184e347de485bf253d19fcb3b1a3fb0ee4cea5ee3c43ec0cc429b6d29f"}, 1022 | {file = "scikit_learn-1.2.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e6e574db9914afcb4e11ade84fab084536a895ca60aadea3041e85b8ac963edb"}, 1023 | {file = 
"scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fe83b676f407f00afa388dd1fdd49e5c6612e551ed84f3b1b182858f09e987d"}, 1024 | {file = "scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2642baa0ad1e8f8188917423dd73994bf25429f8893ddbe115be3ca3183584"}, 1025 | {file = "scikit_learn-1.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ad66c3848c0a1ec13464b2a95d0a484fd5b02ce74268eaa7e0c697b904f31d6c"}, 1026 | {file = "scikit_learn-1.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfeaf8be72117eb61a164ea6fc8afb6dfe08c6f90365bde2dc16456e4bc8e45f"}, 1027 | {file = "scikit_learn-1.2.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:fe0aa1a7029ed3e1dcbf4a5bc675aa3b1bc468d9012ecf6c6f081251ca47f590"}, 1028 | {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:065e9673e24e0dc5113e2dd2b4ca30c9d8aa2fa90f4c0597241c93b63130d233"}, 1029 | {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf036ea7ef66115e0d49655f16febfa547886deba20149555a41d28f56fd6d3c"}, 1030 | {file = "scikit_learn-1.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:8b0670d4224a3c2d596fd572fb4fa673b2a0ccfb07152688ebd2ea0b8c61025c"}, 1031 | {file = "scikit_learn-1.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9c710ff9f9936ba8a3b74a455ccf0dcf59b230caa1e9ba0223773c490cab1e51"}, 1032 | {file = "scikit_learn-1.2.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:2dd3ffd3950e3d6c0c0ef9033a9b9b32d910c61bd06cb8206303fb4514b88a49"}, 1033 | {file = "scikit_learn-1.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44b47a305190c28dd8dd73fc9445f802b6ea716669cfc22ab1eb97b335d238b1"}, 1034 | {file = "scikit_learn-1.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:953236889928d104c2ef14027539f5f2609a47ebf716b8cbe4437e85dce42744"}, 1035 | {file = 
"scikit_learn-1.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:7f69313884e8eb311460cc2f28676d5e400bd929841a2c8eb8742ae78ebf7c20"}, 1036 | {file = "scikit_learn-1.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8156db41e1c39c69aa2d8599ab7577af53e9e5e7a57b0504e116cc73c39138dd"}, 1037 | {file = "scikit_learn-1.2.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fe175ee1dab589d2e1033657c5b6bec92a8a3b69103e3dd361b58014729975c3"}, 1038 | {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d5312d9674bed14f73773d2acf15a3272639b981e60b72c9b190a0cffed5bad"}, 1039 | {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea061bf0283bf9a9f36ea3c5d3231ba2176221bbd430abd2603b1c3b2ed85c89"}, 1040 | {file = "scikit_learn-1.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6477eed40dbce190f9f9e9d0d37e020815825b300121307942ec2110302b66a3"}, 1041 | ] 1042 | 1043 | [package.dependencies] 1044 | joblib = ">=1.1.1" 1045 | numpy = ">=1.17.3" 1046 | scipy = ">=1.3.2" 1047 | threadpoolctl = ">=2.0.0" 1048 | 1049 | [package.extras] 1050 | benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] 1051 | docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] 1052 | examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"] 1053 | tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy (>=0.961)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=5.3.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.16.2)"] 1054 | 1055 | [[package]] 1056 | name = "scipy" 1057 | 
version = "1.9.3" 1058 | description = "Fundamental algorithms for scientific computing in Python" 1059 | category = "main" 1060 | optional = false 1061 | python-versions = ">=3.8" 1062 | files = [ 1063 | {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"}, 1064 | {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"}, 1065 | {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a72d885fa44247f92743fc20732ae55564ff2a519e8302fb7e18717c5355a8b"}, 1066 | {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01e1dd7b15bd2449c8bfc6b7cc67d630700ed655654f0dfcf121600bad205c9"}, 1067 | {file = "scipy-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:68239b6aa6f9c593da8be1509a05cb7f9efe98b80f43a5861cd24c7557e98523"}, 1068 | {file = "scipy-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b41bc822679ad1c9a5f023bc93f6d0543129ca0f37c1ce294dd9d386f0a21096"}, 1069 | {file = "scipy-1.9.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:90453d2b93ea82a9f434e4e1cba043e779ff67b92f7a0e85d05d286a3625df3c"}, 1070 | {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c06e62a390a9167da60bedd4575a14c1f58ca9dfde59830fc42e5197283dab"}, 1071 | {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abaf921531b5aeaafced90157db505e10345e45038c39e5d9b6c7922d68085cb"}, 1072 | {file = "scipy-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:06d2e1b4c491dc7d8eacea139a1b0b295f74e1a1a0f704c375028f8320d16e31"}, 1073 | {file = "scipy-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a04cd7d0d3eff6ea4719371cbc44df31411862b9646db617c99718ff68d4840"}, 1074 | {file = "scipy-1.9.3-cp38-cp38-macosx_12_0_arm64.whl", hash = 
"sha256:545c83ffb518094d8c9d83cce216c0c32f8c04aaf28b92cc8283eda0685162d5"}, 1075 | {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d54222d7a3ba6022fdf5773931b5d7c56efe41ede7f7128c7b1637700409108"}, 1076 | {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff3a5295234037e39500d35316a4c5794739433528310e117b8a9a0c76d20fc"}, 1077 | {file = "scipy-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:2318bef588acc7a574f5bfdff9c172d0b1bf2c8143d9582e05f878e580a3781e"}, 1078 | {file = "scipy-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d644a64e174c16cb4b2e41dfea6af722053e83d066da7343f333a54dae9bc31c"}, 1079 | {file = "scipy-1.9.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:da8245491d73ed0a994ed9c2e380fd058ce2fa8a18da204681f2fe1f57f98f95"}, 1080 | {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4db5b30849606a95dcf519763dd3ab6fe9bd91df49eba517359e450a7d80ce2e"}, 1081 | {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c68db6b290cbd4049012990d7fe71a2abd9ffbe82c0056ebe0f01df8be5436b0"}, 1082 | {file = "scipy-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:5b88e6d91ad9d59478fafe92a7c757d00c59e3bdc3331be8ada76a4f8d683f58"}, 1083 | {file = "scipy-1.9.3.tar.gz", hash = "sha256:fbc5c05c85c1a02be77b1ff591087c83bc44579c6d2bd9fb798bb64ea5e1a027"}, 1084 | ] 1085 | 1086 | [package.dependencies] 1087 | numpy = ">=1.18.5,<1.26.0" 1088 | 1089 | [package.extras] 1090 | dev = ["flake8", "mypy", "pycodestyle", "typing_extensions"] 1091 | doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-panels (>=0.5.2)", "sphinx-tabs"] 1092 | test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] 1093 | 1094 | [[package]] 1095 | name = "setuptools" 1096 | version = "67.7.2" 1097 | description = "Easily download, 
build, install, upgrade, and uninstall Python packages" 1098 | category = "main" 1099 | optional = false 1100 | python-versions = ">=3.7" 1101 | files = [ 1102 | {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, 1103 | {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, 1104 | ] 1105 | 1106 | [package.extras] 1107 | docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] 1108 | testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] 1109 | testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] 1110 | 1111 | [[package]] 1112 | name = "six" 1113 | version = "1.16.0" 1114 | description = "Python 2 and 3 compatibility utilities" 1115 | category = "main" 1116 | optional = false 1117 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 1118 | files = [ 1119 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 1120 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 1121 | ] 1122 | 1123 | [[package]] 1124 | name = 
"sniffio" 1125 | version = "1.3.0" 1126 | description = "Sniff out which async library your code is running under" 1127 | category = "main" 1128 | optional = false 1129 | python-versions = ">=3.7" 1130 | files = [ 1131 | {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, 1132 | {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, 1133 | ] 1134 | 1135 | [[package]] 1136 | name = "sqlite-fts4" 1137 | version = "1.0.3" 1138 | description = "Python functions for working with SQLite FTS4 search" 1139 | category = "dev" 1140 | optional = false 1141 | python-versions = "*" 1142 | files = [ 1143 | {file = "sqlite-fts4-1.0.3.tar.gz", hash = "sha256:78b05eeaf6680e9dbed8986bde011e9c086a06cb0c931b3cf7da94c214e8930c"}, 1144 | {file = "sqlite_fts4-1.0.3-py3-none-any.whl", hash = "sha256:0359edd8dea6fd73c848989e1e2b1f31a50fe5f9d7272299ff0e8dbaa62d035f"}, 1145 | ] 1146 | 1147 | [package.extras] 1148 | test = ["pytest"] 1149 | 1150 | [[package]] 1151 | name = "sqlite-ml" 1152 | version = "0.1.2" 1153 | description = "An SQLite extension for machine learning" 1154 | category = "main" 1155 | optional = false 1156 | python-versions = ">=3.8.1,<4.0.0" 1157 | files = [ 1158 | {file = "sqlite_ml-0.1.2-py3-none-any.whl", hash = "sha256:5c2fb47bd65e55e5917c93aa668742c1422949d3c2c7394e2add5ddf78950370"}, 1159 | {file = "sqlite_ml-0.1.2.tar.gz", hash = "sha256:99bf8011fdd20e481240de3bb73da4ed3adebf3fe90918eef740f0b7b510d5e8"}, 1160 | ] 1161 | 1162 | [package.dependencies] 1163 | pandas = "*" 1164 | scikit-learn = "*" 1165 | 1166 | [[package]] 1167 | name = "sqlite-utils" 1168 | version = "3.36" 1169 | description = "CLI tool and Python library for manipulating SQLite databases" 1170 | category = "dev" 1171 | optional = false 1172 | python-versions = ">=3.7" 1173 | files = [ 1174 | {file = "sqlite-utils-3.36.tar.gz", hash = 
"sha256:dcc311394fe86dc16f65037b0075e238efcfd2e12e65d53ed196954502996f3c"}, 1175 | {file = "sqlite_utils-3.36-py3-none-any.whl", hash = "sha256:b71e829755c2efbdcd6931a31968dee4e8bd71b3c14f0fe648b22377027c5bec"}, 1176 | ] 1177 | 1178 | [package.dependencies] 1179 | click = "*" 1180 | click-default-group = ">=1.2.3" 1181 | pluggy = "*" 1182 | python-dateutil = "*" 1183 | sqlite-fts4 = "*" 1184 | tabulate = "*" 1185 | 1186 | [package.extras] 1187 | docs = ["beanbag-docutils (>=2.0)", "codespell", "furo", "pygments-csv-lexer", "sphinx-autobuild", "sphinx-copybutton"] 1188 | flake8 = ["flake8"] 1189 | mypy = ["data-science-types", "mypy", "types-click", "types-pluggy", "types-python-dateutil", "types-tabulate"] 1190 | test = ["black", "cogapp", "hypothesis", "pytest"] 1191 | tui = ["trogon"] 1192 | 1193 | [[package]] 1194 | name = "tabulate" 1195 | version = "0.9.0" 1196 | description = "Pretty-print tabular data" 1197 | category = "dev" 1198 | optional = false 1199 | python-versions = ">=3.7" 1200 | files = [ 1201 | {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, 1202 | {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, 1203 | ] 1204 | 1205 | [package.extras] 1206 | widechars = ["wcwidth"] 1207 | 1208 | [[package]] 1209 | name = "threadpoolctl" 1210 | version = "3.1.0" 1211 | description = "threadpoolctl" 1212 | category = "main" 1213 | optional = false 1214 | python-versions = ">=3.6" 1215 | files = [ 1216 | {file = "threadpoolctl-3.1.0-py3-none-any.whl", hash = "sha256:8b99adda265feb6773280df41eece7b2e6561b772d21ffd52e372f999024907b"}, 1217 | {file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"}, 1218 | ] 1219 | 1220 | [[package]] 1221 | name = "tomli" 1222 | version = "2.0.1" 1223 | description = "A lil' TOML parser" 1224 | category = "dev" 1225 | optional = 
false 1226 | python-versions = ">=3.7" 1227 | files = [ 1228 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 1229 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 1230 | ] 1231 | 1232 | [[package]] 1233 | name = "typing-extensions" 1234 | version = "4.5.0" 1235 | description = "Backported and Experimental Type Hints for Python 3.7+" 1236 | category = "main" 1237 | optional = false 1238 | python-versions = ">=3.7" 1239 | files = [ 1240 | {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, 1241 | {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, 1242 | ] 1243 | 1244 | [[package]] 1245 | name = "tzdata" 1246 | version = "2023.3" 1247 | description = "Provider of IANA time zone data" 1248 | category = "main" 1249 | optional = false 1250 | python-versions = ">=2" 1251 | files = [ 1252 | {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, 1253 | {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, 1254 | ] 1255 | 1256 | [[package]] 1257 | name = "uvicorn" 1258 | version = "0.22.0" 1259 | description = "The lightning-fast ASGI server." 
1260 | category = "main" 1261 | optional = false 1262 | python-versions = ">=3.7" 1263 | files = [ 1264 | {file = "uvicorn-0.22.0-py3-none-any.whl", hash = "sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996"}, 1265 | {file = "uvicorn-0.22.0.tar.gz", hash = "sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8"}, 1266 | ] 1267 | 1268 | [package.dependencies] 1269 | click = ">=7.0" 1270 | h11 = ">=0.8" 1271 | 1272 | [package.extras] 1273 | standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] 1274 | 1275 | [metadata] 1276 | lock-version = "2.0" 1277 | python-versions = "^3.8.1" 1278 | content-hash = "043f9d12efe57f08dabcd0b0284e04d9b2f89875ce28851199a76a447407dbc2" 1279 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "datasette-ml" 3 | version = "0.1.2" 4 | description = "A Datasette plugin providing an MLOps platform to train, eval and predict machine learning models" 5 | repository = "https://github.com/rclement/datasette-ml" 6 | authors = ["Romain Clement"] 7 | license = "Apache License, Version 2.0" 8 | readme = "README.md" 9 | packages = [{include = "datasette_ml"}] 10 | 11 | [tool.poetry.dependencies] 12 | python = "^3.8.1" 13 | datasette = "*" 14 | sqlite-ml = "==0.1.2" 15 | 16 | [tool.poetry.group.dev.dependencies] 17 | black = "==23.12.1" 18 | faker = "==19.13.0" 19 | flake8 = "==6.1.0" 20 | mypy = "==1.8.0" 21 | pytest = "==7.4.3" 22 | pytest-asyncio = "==0.23.2" 23 | pytest-cov = "==4.1.0" 24 | sqlite-utils = "==3.36" 25 | 26 | [tool.poetry.plugins."datasette"] 27 | "ml" = "datasette_ml" 28 | 29 | [tool.mypy] 30 | show_error_codes = "True" 31 | pretty = "True" 32 | follow_imports = "silent" 33 | strict_optional = "True" 34 | 
warn_redundant_casts = "True" 35 | warn_unused_ignores = "True" 36 | disallow_any_generics = "True" 37 | check_untyped_defs = "True" 38 | no_implicit_reexport = "True" 39 | disallow_untyped_defs = "True" 40 | 41 | [[tool.mypy.overrides]] 42 | module = "datasette.*" 43 | ignore_missing_imports = "True" 44 | 45 | [[tool.mypy.overrides]] 46 | module = "pandas.*" 47 | ignore_missing_imports = "True" 48 | 49 | [[tool.mypy.overrides]] 50 | module = "sklearn.*" 51 | ignore_missing_imports = "True" 52 | 53 | [build-system] 54 | requires = ["poetry-core"] 55 | build-backend = "poetry.core.masonry.api" 56 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:base", 5 | ":preserveSemverRanges", 6 | ":disableDependencyDashboard" 7 | ], 8 | "rangeStrategy": "bump", 9 | "labels": ["dependencies"], 10 | "assignees": ["rclement"], 11 | "reviewers": ["rclement"] 12 | } 13 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rclement/datasette-ml/6f0077e3c877e58736d5574def6ea9f3a4a56ca2/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | import numpy as np 3 | import pandas as pd 4 | import pytest 5 | import sqlite_utils 6 | 7 | from pathlib import Path 8 | from datasette.app import Datasette 9 | from sklearn.datasets import make_classification, make_regression 10 | 11 | 12 | @pytest.fixture(scope="session") 13 | def regression_dataset() -> pd.DataFrame: 14 | X, y = make_regression(n_samples=1000, n_features=3, 
random_state=0) 15 | return pd.DataFrame( 16 | np.concatenate([X, np.vstack(y)], axis=1), 17 | columns=[f"feature{i + 1}" for i in range(X.shape[1])] + ["target"], 18 | ) 19 | 20 | 21 | @pytest.fixture(scope="session") 22 | def classification_dataset() -> pd.DataFrame: 23 | X, y = make_classification(n_samples=1000, random_state=0) 24 | return pd.DataFrame( 25 | np.concatenate([X, np.vstack(y)], axis=1), 26 | columns=[f"feature{i + 1}" for i in range(X.shape[1])] + ["target"], 27 | ) 28 | 29 | 30 | @pytest.fixture(scope="function") 31 | def sqml_db( 32 | tmp_path_factory: pytest.TempPathFactory, 33 | regression_dataset: pd.DataFrame, 34 | classification_dataset: pd.DataFrame, 35 | ) -> Path: 36 | db_directory = tmp_path_factory.mktemp("dbs") 37 | db_path = db_directory / "sqml.db" 38 | db = sqlite_utils.Database(db_path) 39 | 40 | data_regression = sqlite_utils.db.Table(db, "data_regression") 41 | data_regression.insert_all(regression_dataset.to_dict("records")) 42 | 43 | data_classification = sqlite_utils.db.Table(db, "data_classification") 44 | data_classification.insert_all(classification_dataset.to_dict("records")) 45 | return db_path 46 | 47 | 48 | @pytest.fixture(scope="session") 49 | def datasette_metadata() -> t.Mapping[str, t.Any]: 50 | return {"plugins": {"datasette-ml": {"db": "sqml"}}} 51 | 52 | 53 | @pytest.fixture(scope="function") 54 | def datasette(sqml_db: Path, datasette_metadata: t.Mapping[str, t.Any]) -> Datasette: 55 | return Datasette([str(sqml_db)], metadata=datasette_metadata) 56 | -------------------------------------------------------------------------------- /tests/test_plugin.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import json 3 | import sys 4 | import typing as t 5 | import sklearn 6 | import pytest 7 | 8 | from urllib.parse import urlencode 9 | from pathlib import Path 10 | from datasette.app import Datasette 11 | from datasette.database import Database 12 | from faker 
import Faker 13 | 14 | 15 | @pytest.mark.asyncio 16 | async def test_plugin_is_installed(datasette: Datasette) -> None: 17 | response = await datasette.client.get("/-/plugins.json") 18 | assert response.status_code == 200 19 | 20 | installed_plugins = {p["name"] for p in response.json()} 21 | assert "datasette-ml" in installed_plugins 22 | 23 | 24 | # ------------------------------------------------------------------------------ 25 | 26 | 27 | @pytest.mark.asyncio 28 | @pytest.mark.parametrize( 29 | "sql_table", 30 | [ 31 | "sqml_experiments", 32 | "sqml_runs", 33 | "sqml_models", 34 | "sqml_metrics", 35 | "sqml_deployments", 36 | ], 37 | ) 38 | async def test_plugin_created_sql_table(datasette: Datasette, sql_table: str) -> None: 39 | response = await datasette.client.get("/sqml.json") 40 | assert response.status_code == 200 41 | 42 | available_sql_tables = {f["name"] for f in response.json()["tables"]} 43 | assert sql_table in available_sql_tables 44 | 45 | 46 | @pytest.mark.asyncio 47 | @pytest.mark.parametrize( 48 | "sql_view", 49 | [ 50 | "sqml_runs_overview", 51 | ], 52 | ) 53 | async def test_plugin_created_sql_view(datasette: Datasette, sql_view: str) -> None: 54 | response = await datasette.client.get("/sqml.json") 55 | assert response.status_code == 200 56 | 57 | available_sql_views = {f["name"] for f in response.json()["views"]} 58 | assert sql_view in available_sql_views 59 | 60 | 61 | @pytest.mark.asyncio 62 | @pytest.mark.parametrize( 63 | "sql_function", 64 | [ 65 | "sqml_python_version", 66 | "sqml_sklearn_version", 67 | "sqml_load_dataset", 68 | "sqml_train", 69 | "sqml_predict", 70 | ], 71 | ) 72 | async def test_plugin_registered_sql_function( 73 | datasette: Datasette, sql_function: str 74 | ) -> None: 75 | response = await datasette.client.get( 76 | "/sqml.json?sql=select * from pragma_function_list()&_shape=array" 77 | ) 78 | assert response.status_code == 200 79 | 80 | available_sql_functions = {f["name"] for f in response.json()} 81 | assert 
sql_function in available_sql_functions 82 | 83 | 84 | @pytest.mark.asyncio 85 | async def test_plugin_default_database( 86 | sqml_db: Path, datasette_metadata: t.Mapping[str, t.Any] 87 | ) -> None: 88 | metadata = copy.deepcopy(datasette_metadata) 89 | metadata["plugins"].pop("datasette-ml") 90 | datasette = Datasette([str(sqml_db)], metadata=metadata) 91 | 92 | response = await datasette.client.get("/sqml.json") 93 | assert response.status_code == 200 94 | 95 | 96 | @pytest.mark.asyncio 97 | async def test_plugin_unknown_database( 98 | sqml_db: Path, datasette_metadata: t.Mapping[str, t.Any] 99 | ) -> None: 100 | metadata = copy.deepcopy(datasette_metadata) 101 | metadata["plugins"]["datasette-ml"]["db"] = "unknown" 102 | datasette = Datasette([str(sqml_db)], metadata=metadata) 103 | 104 | with pytest.raises(KeyError): 105 | await datasette.invoke_startup() 106 | 107 | 108 | # ------------------------------------------------------------------------------ 109 | 110 | 111 | @pytest.mark.asyncio 112 | async def test_sqml_python_version(datasette: Datasette) -> None: 113 | query = """ 114 | SELECT sqml_python_version() AS version; 115 | """ 116 | qs = urlencode({"sql": query, "_shape": "array"}) 117 | response = await datasette.client.get(f"/sqml.json?{qs}") 118 | assert response.status_code == 200 119 | 120 | rows = response.json() 121 | assert len(rows) == 1 122 | assert rows[0]["version"] == sys.version 123 | 124 | 125 | @pytest.mark.asyncio 126 | async def test_sqml_sklearn_version(datasette: Datasette) -> None: 127 | query = """ 128 | SELECT sqml_sklearn_version() AS version; 129 | """ 130 | qs = urlencode({"sql": query, "_shape": "array"}) 131 | response = await datasette.client.get(f"/sqml.json?{qs}") 132 | assert response.status_code == 200 133 | 134 | rows = response.json() 135 | assert len(rows) == 1 136 | assert rows[0]["version"] == sklearn.__version__ 137 | 138 | 139 | # ------------------------------------------------------------------------------ 140 | 
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "dataset",
    ["iris", "digits", "wine", "breast_cancer", "diabetes"],
)
async def test_sqml_load_dataset(datasette: Datasette, dataset: str) -> None:
    """Load each bundled scikit-learn toy dataset via SQL and verify the
    created table matches the metadata returned by sqml_load_dataset()."""
    query = f"""
    SELECT sqml_load_dataset('{dataset}') AS info;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    # The SQL function returns a JSON blob describing the loaded dataset.
    info = json.loads(rows[0]["info"])
    db: Database = datasette.get_database("sqml")
    columns = await db.table_columns(info["table"])
    # Bracket-quote the table name: dataset tables are named dynamically.
    count_res = await db.execute(
        f'select count(*) from [{info["table"]}]',
    )
    count = count_res.rows[0][0]

    assert info["table"] == f"dataset_{dataset}"
    # Table layout is all feature columns followed by a single target column.
    assert columns == info["feature_names"] + ["target"]
    assert count > 0 and count == info["size"]


@pytest.mark.asyncio
async def test_sqml_load_dataset_unknown(datasette: Datasette) -> None:
    """An unknown dataset name yields a JSON error payload, not an HTTP error."""
    query = """
    SELECT sqml_load_dataset('unknown') AS info;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    info = json.loads(rows[0]["info"])
    assert "error" in info.keys()


# ------------------------------------------------------------------------------


@pytest.mark.asyncio
@pytest.mark.parametrize(
    ("prediction_type", "algorithm"),
    [
        ("regression", "dummy"),
        ("regression", "linear_regression"),
        ("regression", "sgd"),
        ("regression", "ridge"),
        ("regression", "ridge_cv"),
        ("regression", "elastic_net"),
        ("regression", "elastic_net_cv"),
        ("regression", "lasso"),
        ("regression", "lasso_cv"),
        ("regression", "decision_tree"),
        ("regression", "ada_boost"),
        ("regression", "bagging"),
        ("regression", "gradient_boosting"),
        ("regression", "random_forest"),
        ("regression", "knn"),
        ("regression", "mlp"),
        ("regression", "svr"),
        ("classification", "dummy"),
        ("classification", "logistic_regression"),
        ("classification", "sgd"),
        ("classification", "ridge"),
        ("classification", "ridge_cv"),
        ("classification", "decision_tree"),
        ("classification", "ada_boost"),
        ("classification", "bagging"),
        ("classification", "gradient_boosting"),
        ("classification", "random_forest"),
        ("classification", "knn"),
        ("classification", "mlp"),
        ("classification", "svc"),
    ],
)
async def test_sqml_train(
    datasette: Datasette, faker: Faker, prediction_type: str, algorithm: str
) -> None:
    """Train one model per supported (prediction_type, algorithm) pair and
    verify every bookkeeping row (experiment, run, model, metrics,
    deployment) written by sqml_train().

    Each parametrized case runs against a fresh database, so all id-based
    lookups below assume id = 1.
    """
    experiment_name = faker.bs()
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    db: Database = datasette.get_database("sqml")

    # The JSON returned by sqml_train() echoes the request and reports
    # deployment status plus the model's score.
    assert training["experiment_name"] == experiment_name
    assert training["prediction_type"] == prediction_type
    assert training["algorithm"] == algorithm
    assert training["deployed"]
    assert isinstance(training["score"], float)

    experiment = (
        await db.execute(
            """
            SELECT *
            FROM sqml_experiments
            WHERE id = 1
            """,
        )
    ).rows[0]
    assert experiment["name"] == experiment_name
    assert experiment["prediction_type"] == prediction_type

    run = (
        await db.execute(
            """
            SELECT *
            FROM sqml_runs
            WHERE id = 1
            """,
        )
    ).rows[0]
    assert run["status"] == "success"
    assert run["algorithm"] == algorithm
    assert run["dataset"] == dataset
    assert run["target"] == target
    # Defaults used when test_size / split_strategy are not passed.
    assert run["test_size"] == 0.25
    assert run["split_strategy"] == "shuffle"
    assert run["experiment_id"] == 1

    model = (
        await db.execute(
            """
            SELECT *
            FROM sqml_models
            WHERE id = 1
            """,
        )
    ).rows[0]
    assert model["run_id"] == 1
    assert model["library"] == "scikit-learn"
    # Serialized model blob must be non-empty.
    assert isinstance(model["data"], bytes) and len(model["data"]) > 0

    metrics = {
        m["name"]: m["value"]
        for m in (
            await db.execute(
                """
                SELECT *
                FROM sqml_metrics
                WHERE model_id = 1
                """,
            )
        ).rows
    }

    assert isinstance(metrics["score"], float)
    if prediction_type == "regression":
        # score + r2 + mae + rmse; "score" aliases r2.
        assert len(metrics.keys()) == 4
        assert isinstance(metrics["r2"], float)
        assert isinstance(metrics["mae"], float)
        assert isinstance(metrics["rmse"], float)
        assert metrics["score"] == metrics["r2"]
    else:
        # score + accuracy + f1 + precision + recall; "score" aliases accuracy.
        assert len(metrics.keys()) == 5
        assert isinstance(metrics["accuracy"], float)
        assert isinstance(metrics["f1"], float)
        assert isinstance(metrics["precision"], float)
        assert isinstance(metrics["recall"], float)
        assert metrics["score"] == metrics["accuracy"]

    deployment = (
        await db.execute(
            """
            SELECT *
            FROM sqml_deployments
            WHERE id = 1
            """,
        )
    ).rows[0]
    assert deployment["experiment_id"] == 1
    assert deployment["model_id"] == 1
    assert deployment["active"]


@pytest.mark.asyncio
async def test_sqml_train_better_model(datasette: Datasette, faker: Faker) -> None:
    """Retraining after the first model's score is lowered deploys the new
    (better-scoring) model and deactivates the previous deployment."""
    db: Database = datasette.get_database("sqml")

    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    # Artificially lower the first model's score so the second training run
    # is guaranteed to beat it.
    await db.execute_write(
        """
        UPDATE sqml_metrics
        SET value = 0.5
        WHERE id = 1 AND name = 'score'
        """
    )

    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert training["deployed"]

    runs = (await db.execute("SELECT * FROM sqml_runs ORDER BY id")).rows
    assert len(runs) == 2

    # A new deployment row is added; the old one is switched off.
    deployments = (await db.execute("SELECT * FROM sqml_deployments ORDER BY id")).rows
    assert len(deployments) == 2
    assert not deployments[0]["active"]
    assert deployments[0]["model_id"] == 1
    assert deployments[1]["active"]
    assert deployments[1]["model_id"] == 2


@pytest.mark.asyncio
async def test_sqml_train_worse_model(datasette: Datasette, faker: Faker) -> None:
    """Retraining when the first model's score is perfect keeps the original
    deployment: a worse model is trained (new run) but never deployed."""
    db: Database = datasette.get_database("sqml")

    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    # Inflate the first model's score to a perfect 1.0 so the second run
    # cannot beat it.
    await db.execute_write(
        """
        UPDATE sqml_metrics
        SET value = 1.0
        WHERE id = 1 AND name = 'score'
        """
    )

    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert not training["deployed"]

    runs = (await db.execute("SELECT * FROM sqml_runs ORDER BY id")).rows
    assert len(runs) == 2

    # Still only the original deployment, still active.
    deployments = (await db.execute("SELECT * FROM sqml_deployments ORDER BY id")).rows
    assert len(deployments) == 1
    assert deployments[0]["active"]
    assert deployments[0]["model_id"] == 1


@pytest.mark.asyncio
async def test_sqml_train_existing_experiment(
    datasette: Datasette, faker: Faker
) -> None:
    """Training twice with the same experiment name reuses the experiment
    row (count stays 1) while recording a run per training call."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    db: Database = datasette.get_database("sqml")

    experiment = (
        await db.execute(
            """
            SELECT count(*) AS count
            FROM sqml_experiments
            """,
        )
    ).rows[0]
    assert experiment["count"] == 1

    run = (
        await db.execute(
            """
            SELECT count(*) AS count
            FROM sqml_runs
            """,
        )
    ).rows[0]
    assert run["count"] == 2


@pytest.mark.asyncio
async def test_sqml_train_existing_experiment_wrong_prediction_type(
    datasette: Datasette, faker: Faker
) -> None:
    """Reusing an experiment name with a different prediction type is
    rejected with a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    # Second call: same experiment name, conflicting prediction type.
    prediction_type = "classification"
    algorithm = "logistic_regression"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


@pytest.mark.asyncio
async def test_sqml_train_wrong_prediction_type_algorithm(
    datasette: Datasette, faker: Faker
) -> None:
    """A classification algorithm requested for a regression experiment
    produces a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "logistic_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


@pytest.mark.asyncio
async def test_sqml_train_unknown_prediction_type(
    datasette: Datasette, faker: Faker
) -> None:
    """An unrecognized prediction type produces a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "unknown"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


@pytest.mark.asyncio
async def test_sqml_train_unknown_algorithm(datasette: Datasette, faker: Faker) -> None:
    """An unrecognized algorithm name produces a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "unknown"
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


@pytest.mark.asyncio
async def test_sqml_train_unknown_dataset(datasette: Datasette, faker: Faker) -> None:
    """A dataset table that does not exist produces a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = "unknown"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


@pytest.mark.asyncio
async def test_sqml_train_unknown_target(datasette: Datasette, faker: Faker) -> None:
    """A target column missing from the dataset produces a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "unknown"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


@pytest.mark.asyncio
async def test_sqml_train_unknown_split_strategy(
    datasette: Datasette, faker: Faker
) -> None:
    """An unrecognized split strategy (7-argument form of sqml_train)
    produces a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    test_size = 0.25
    split_strategy = "unknown"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}',
        {test_size},
        '{split_strategy}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "test_size",
    [-0.25, 1.1],
)
async def test_sqml_train_out_of_range_test_size(
    datasette: Datasette, faker: Faker, test_size: float
) -> None:
    """A test_size outside the valid range produces a JSON error payload."""
    experiment_name = faker.bs()
    prediction_type = "regression"
    algorithm = "linear_regression"
    dataset = f"data_{prediction_type}"
    target = "target"
    split_strategy = "shuffle"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}',
        {test_size},
        '{split_strategy}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    training = json.loads(rows[0]["training"])
    assert "error" in training.keys()


# ------------------------------------------------------------------------------


@pytest.mark.asyncio
@pytest.mark.parametrize(
    ("prediction_type", "algorithm"),
    [
        ("regression", "linear_regression"),
        ("classification", "logistic_regression"),
    ],
)
async def test_sqml_predict(
    datasette: Datasette, faker: Faker, prediction_type: str, algorithm: str
) -> None:
    """Train a model, then predict on one row's features (as a JSON object)
    and check sqml_predict() returns a float."""
    experiment_name = faker.bs()
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    db: Database = datasette.get_database("sqml")
    data_row = (
        await db.execute(
            f"""
            SELECT *
            FROM {dataset}
            LIMIT 1
            """,
        )
    ).rows[0]

    # Features payload: every column except the target, JSON-encoded.
    features = json.dumps({k: v for k, v in dict(data_row).items() if k != target})
    query = f"""
    SELECT sqml_predict(
        '{experiment_name}',
        '{features}'
    ) AS prediction;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    prediction = rows[0]["prediction"]
    assert isinstance(prediction, float)


@pytest.mark.asyncio
async def test_sqml_predict_unknown_experiment(
    datasette: Datasette, faker: Faker
) -> None:
    """Predicting against a nonexistent experiment yields a JSON error payload."""
    experiment_name = faker.bs()
    query = f"""
    SELECT sqml_predict(
        '{experiment_name}',
        '{{}}'
    ) AS prediction;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    prediction = json.loads(rows[0]["prediction"])
    assert "error" in prediction.keys()


@pytest.mark.asyncio
async def test_sqml_predict_no_deployment(datasette: Datasette, faker: Faker) -> None:
    """Predicting against an experiment that exists but has no deployed
    model yields a JSON error payload."""
    experiment_name = faker.bs()

    db: Database = datasette.get_database("sqml")
    # Ensure the plugin's schema exists before inserting directly.
    await datasette.invoke_startup()
    # Create a bare experiment row with no runs/models/deployments.
    await db.execute_write(
        """
        INSERT INTO sqml_experiments(name, prediction_type)
        VALUES (?, ?)
        """,
        (experiment_name, "classification"),
    )

    query = f"""
    SELECT sqml_predict(
        '{experiment_name}',
        '{{}}'
    ) AS prediction;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    prediction = json.loads(rows[0]["prediction"])
    assert "error" in prediction.keys()


# ------------------------------------------------------------------------------


@pytest.mark.asyncio
@pytest.mark.parametrize(
    ("prediction_type", "algorithm"),
    [
        ("regression", "linear_regression"),
        ("classification", "logistic_regression"),
    ],
)
async def test_sqml_predict_batch(
    datasette: Datasette, faker: Faker, prediction_type: str, algorithm: str
) -> None:
    """Train a model, then batch-predict over the whole dataset using
    json_group_array/json_object and check one float per input row."""
    experiment_name = faker.bs()
    dataset = f"data_{prediction_type}"
    target = "target"
    query = f"""
    SELECT sqml_train(
        '{experiment_name}',
        '{prediction_type}',
        '{algorithm}',
        '{dataset}',
        '{target}'
    ) AS training;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    db: Database = datasette.get_database("sqml")
    data_rows = (await db.execute(f"SELECT * FROM {dataset}")).rows
    count_rows = (await db.execute(f"SELECT count(*) AS count FROM {dataset}")).rows[0][
        "count"
    ]

    # Build the json_object() argument list: 'col', [col] pairs for every
    # non-target column.
    features = ", ".join(
        f"'{k}', [{k}]" for k in dict(data_rows[0]).keys() if k != target
    )
    query = f"""
    SELECT sqml_predict_batch(
        '{experiment_name}',
        json_group_array(
            json_object({features})
        )
    ) AS predictions
    FROM {dataset};
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    predictions = json.loads(rows[0]["predictions"])
    assert len(predictions) == count_rows
    for pred in predictions:
        assert isinstance(pred, float)


@pytest.mark.asyncio
async def test_sqml_predict_batch_unknown_experiment(
    datasette: Datasette, faker: Faker
) -> None:
    """Batch-predicting against a nonexistent experiment yields a JSON
    error payload."""
    experiment_name = faker.bs()
    query = f"""
    SELECT sqml_predict_batch(
        '{experiment_name}',
        '[]'
    ) AS prediction;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    prediction = json.loads(rows[0]["prediction"])
    assert "error" in prediction.keys()


@pytest.mark.asyncio
async def test_sqml_predict_batch_no_deployment(
    datasette: Datasette, faker: Faker
) -> None:
    """Batch-predicting against an experiment with no deployed model yields
    a JSON error payload."""
    experiment_name = faker.bs()

    db: Database = datasette.get_database("sqml")
    # Ensure the plugin's schema exists before inserting directly.
    await datasette.invoke_startup()
    # Create a bare experiment row with no runs/models/deployments.
    await db.execute_write(
        """
        INSERT INTO sqml_experiments(name, prediction_type)
        VALUES (?, ?)
        """,
        (experiment_name, "classification"),
    )

    query = f"""
    SELECT sqml_predict_batch(
        '{experiment_name}',
        '[]'
    ) AS prediction;
    """
    qs = urlencode({"sql": query, "_shape": "array"})
    response = await datasette.client.get(f"/sqml.json?{qs}")
    assert response.status_code == 200

    rows = response.json()
    assert len(rows) == 1

    prediction = json.loads(rows[0]["prediction"])
    assert "error" in prediction.keys()