├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── example ├── config_files │ ├── example0.json │ ├── example1.json │ ├── example2.json │ ├── example3.json │ ├── example4.json │ ├── example5.json │ └── example6.json ├── data │ ├── json_test.json │ ├── space filename.csv │ ├── test-filename.csv │ ├── test.csv │ ├── test1.csv │ ├── test_null.csv │ └── test_special_chars.csv ├── json_list_test.json ├── json_test.json ├── ndjson_test.ndjson ├── queries │ ├── join.sql │ ├── json_csv_join.sql │ ├── json_query.sql │ └── multi_query.sql └── test.csv ├── poetry.lock ├── pyproject.toml ├── src └── filequery │ ├── __init__.py │ ├── __main__.py │ ├── __version__.py │ ├── exceptions.py │ ├── file_query_args.py │ ├── filedb.py │ ├── filetype.py │ ├── queryresult.py │ └── tui │ ├── duckui.py │ ├── help_content.py │ ├── screens │ ├── file_browser.py │ ├── menu.py │ └── menu_events.py │ └── styles │ └── style.tcss └── tests ├── __init__.py └── test_filequery.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | dev-env/ 110 | test-env/ 111 | ENV/ 112 | env.bak/ 113 | venv.bak/ 114 | 115 | # Spyder project settings 116 | .spyderproject 117 | .spyproject 118 | 119 | # Rope project settings 120 | .ropeproject 121 | 122 | # mkdocs documentation 123 | /site 124 | 125 | # mypy 126 | .mypy_cache/ 127 | .dmypy.json 128 | dmypy.json 129 | 130 | # Pyre type checker 131 | .pyre/ 132 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Markus Hutnik 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # for build and deploy, must be in an environment with requirements from requirements-dev.txt installed 2 | 3 | # create a build - outputs to dist directory 4 | build-pkg: 5 | poetry build 6 | 7 | # upload to pypi 8 | deploy: 9 | twine upload dist/* 10 | 11 | test: 12 | python tests/test_filequery.py 13 | 14 | clean: 15 | rm -rf dist/ build/ __pycache__/ src/filequery.egg-info/ 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # filequery 2 | [![pypi](https://img.shields.io/pypi/v/filequery.svg)](https://pypi.org/project/filequery/) 3 | [![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/MarkyMan4/filequery) 4 | 5 | Query CSV, JSON and Parquet files using SQL. 6 | - runs queries using a DuckDB in-memory database for efficient querying 7 | - any SQL that works with DuckDB will work here 8 | - use the CLI to easily query files in your terminal or automate queries/transformations as part of a script 9 | - use the TUI for a more interactive experience 10 | 11 | ## Demo 12 | 13 | ### CLI 14 | 15 | ![out](https://github.com/MarkyMan4/filequery/assets/37815834/38b6f69b-297f-4913-826e-89ffbfe483b3) 16 | 17 | ### TUI 18 | 19 | ![filequery_tui](https://github.com/MarkyMan4/filequery/assets/37815834/202655ab-359e-4a42-a9eb-49227cf32f22) 20 | 21 | ![filequery_menu](https://github.com/MarkyMan4/filequery/assets/37815834/57a58e3b-f283-43e9-8a9f-68c363d748af) 22 | 23 | ## Installation 24 | 25 | ```bash 26 | pipx install filequery 27 | ``` 28 | 29 | or 30 | 31 | ```bash 32 | pip install filequery 33 | ``` 34 | 35 | ## CLI usage 36 | Run `filequery --help` to see what options are available. 
37 | 38 | ``` 39 | usage: filequery [-h] [-f FILENAME] [-d FILESDIR] [-q QUERY] [-Q QUERY_FILE] [-o OUT_FILE [OUT_FILE ...]] [-F OUT_FILE_FORMAT] [-D DELIMITER] [-c CONFIG] [-e] [-v] 40 | 41 | options: 42 | -h, --help show this help message and exit 43 | -f FILENAME, --filename FILENAME 44 | path to a CSV, Parquet or JSON file 45 | -d FILESDIR, --filesdir FILESDIR 46 | path to a directory which can contain a combination of CSV, Parquet and JSON files 47 | -q QUERY, --query QUERY 48 | SQL query to execute against file 49 | -Q QUERY_FILE, --query_file QUERY_FILE 50 | path to file with query to execute 51 | -o OUT_FILE [OUT_FILE ...], --out_file OUT_FILE [OUT_FILE ...] 52 | file to write results to instead of printing to standard output 53 | -F OUT_FILE_FORMAT, --out_file_format OUT_FILE_FORMAT 54 | either csv or parquet, defaults to csv 55 | -D DELIMITER, --delimiter DELIMITER 56 | delimiter to use when printing result or writing to CSV file 57 | -c CONFIG, --config CONFIG 58 | path to JSON config file 59 | -e, --editor run SQL editor UI for exploring data 60 | -v, --version show program's version number and exit 61 | ``` 62 | 63 | For basic usage, provide a path to a CSV or Parquet file and a query to execute against it. The table name will be the 64 | file name without the extension. If the file name does not conform to DuckDB's rules for unquoted identifiers, the 65 | table name will need to be wrapped in double quotes. For example, a file named `my data.csv` would be queried as 66 | `select * from "my data"`. 67 | 68 | ```bash 69 | filequery --filename example/test.csv --query 'select * from test' 70 | ``` 71 | 72 | ## TUI usage 73 | 74 | To use the TUI for querying your files, use the `-e` flag and provide a path to a file or directory. 
filequery --filename example/test.csv --query 'select * from test; select sum(col3) from test;' # run multiple queries against a single file (pass multiple names to -o to write each result to its own file)
To test the CLI, create a separate virtual environment and perform an editable install.
185 | 186 | ```bash 187 | python tests/test_filequery.py 188 | ``` 189 | -------------------------------------------------------------------------------- /example/config_files/example0.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename": "../example/test.csv", 3 | "query": "select col1, col2 from test" 4 | } 5 | -------------------------------------------------------------------------------- /example/config_files/example1.json: -------------------------------------------------------------------------------- 1 | { 2 | "filesdir": "../example/data", 3 | "query": "select * from test t inner join test1 t1 on t.col1 = t1.col1" 4 | } 5 | -------------------------------------------------------------------------------- /example/config_files/example2.json: -------------------------------------------------------------------------------- 1 | { 2 | "filesdir": "../example/data", 3 | "query_file": "../example/queries/multi_query.sql" 4 | } 5 | -------------------------------------------------------------------------------- /example/config_files/example3.json: -------------------------------------------------------------------------------- 1 | { 2 | "filesdir": "../example/data", 3 | "query_file": "../example/queries/join.sql", 4 | "out_file": "result.csv" 5 | } 6 | -------------------------------------------------------------------------------- /example/config_files/example4.json: -------------------------------------------------------------------------------- 1 | { 2 | "filesdir": "../example/data", 3 | "query_file": "../example/queries/join.sql", 4 | "out_file": "result.parquet", 5 | "out_file_format": "parquet" 6 | } 7 | -------------------------------------------------------------------------------- /example/config_files/example5.json: -------------------------------------------------------------------------------- 1 | { 2 | "filesdir": "../example/data", 3 | "query_file": "../example/queries/multi_query.sql", 4 | 
"out_file": [ 5 | "result1.csv", 6 | "result2.csv", 7 | "result3.csv" 8 | ] 9 | } -------------------------------------------------------------------------------- /example/config_files/example6.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename": "../example/test.csv", 3 | "query": "select col1, col2, col3 from test", 4 | "out_file": "pipe_delim.csv", 5 | "delimiter": "|" 6 | } -------------------------------------------------------------------------------- /example/data/json_test.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 1, 4 | "val1": "test 1", 5 | "val2": 1.0, 6 | "nested": { 7 | "nest_id": 123, 8 | "nest_val": "nested test 1" 9 | } 10 | }, 11 | { 12 | "id": 2, 13 | "val1": "test 2", 14 | "val2": 2.0, 15 | "nested": { 16 | "nest_id": 456, 17 | "nest_val": "nested test 2" 18 | } 19 | } 20 | ] 21 | -------------------------------------------------------------------------------- /example/data/space filename.csv: -------------------------------------------------------------------------------- 1 | col1,col2,col3 2 | 1,test 1,0.1 3 | 2,test 2,0.2 4 | 3,test 3,0.3 5 | -------------------------------------------------------------------------------- /example/data/test-filename.csv: -------------------------------------------------------------------------------- 1 | col1,col2,col3 2 | 1,test 1,0.1 3 | 2,test 2,0.2 4 | 3,test 3,0.3 5 | -------------------------------------------------------------------------------- /example/data/test.csv: -------------------------------------------------------------------------------- 1 | col1,col2,col3 2 | 1,test 1,0.1 3 | 2,test 2,0.2 4 | 3,test 3,0.3 5 | -------------------------------------------------------------------------------- /example/data/test1.csv: -------------------------------------------------------------------------------- 1 | col1,col2,col3 2 | 2,test 2 another file,0.25 3 | 3,test 3 another 
file,0.35 4 | -------------------------------------------------------------------------------- /example/data/test_null.csv: -------------------------------------------------------------------------------- 1 | col1,col2,col3 2 | 1,test 1,0.1 3 | 2,,0.2 4 | 3,test 3,0.3 5 | -------------------------------------------------------------------------------- /example/data/test_special_chars.csv: -------------------------------------------------------------------------------- 1 | col1,col2 2 | 1,[asdf 3 | 2,[]asdf[/] 4 | 3,[bold]asdf[/bold] 5 | 4,]asdf 6 | 5,[bold]asdf 7 | 6,[]asdf 8 | -------------------------------------------------------------------------------- /example/json_list_test.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 1, 4 | "val1": "test 1", 5 | "val2": 1.0 6 | }, 7 | { 8 | "id": 2, 9 | "val1": "test 2", 10 | "val2": 2.0 11 | }, 12 | { 13 | "id": 3, 14 | "val1": "test 3", 15 | "val2": 3.0 16 | } 17 | ] 18 | -------------------------------------------------------------------------------- /example/json_test.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": 1, 3 | "val1": "test 1", 4 | "val2": 1.0, 5 | "nested": { 6 | "nest_id": 2, 7 | "nest_val": "nested test 1" 8 | }, 9 | "list": [ 10 | 1, 11 | 2, 12 | 3, 13 | 4 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /example/ndjson_test.ndjson: -------------------------------------------------------------------------------- 1 | {"id": 1, "value": "abc", "nested": {"subid": 2, "subval": 1.0}} 2 | {"id": 2, "value": "def", "nested": {"subid": 3, "subval": 2.0}} 3 | {"id": 3, "value": "ghi", "nested": {"subid": 4, "subval": 3.0}} 4 | {"id": 4, "value": "jkl", "nested": {"subid": 5, "subval": 4.0}} 5 | -------------------------------------------------------------------------------- /example/queries/join.sql: 
-------------------------------------------------------------------------------- 1 | select * 2 | from 3 | test t 4 | inner join test1 t1 5 | on t.col1 = t1.col1 6 | -------------------------------------------------------------------------------- /example/queries/json_csv_join.sql: -------------------------------------------------------------------------------- 1 | select 2 | t.*, 3 | j.nested.nest_id, 4 | j.nested.nest_id 5 | from 6 | test t 7 | inner join json_test j 8 | on t.col1 = j.id -------------------------------------------------------------------------------- /example/queries/json_query.sql: -------------------------------------------------------------------------------- 1 | -- queries nested JSON which DuckDB interprets as a struct 2 | select 3 | nested.nest_id, 4 | nested.nest_val 5 | from json_test; 6 | -------------------------------------------------------------------------------- /example/queries/multi_query.sql: -------------------------------------------------------------------------------- 1 | select * 2 | from test; 3 | 4 | select * 5 | from test1; 6 | 7 | select * 8 | from 9 | test t1 10 | inner join test1 t2 11 | on t1.col1 = t2.col1; 12 | -------------------------------------------------------------------------------- /example/test.csv: -------------------------------------------------------------------------------- 1 | col1,col2,col3 2 | 1,test 1,0.1 3 | 2,test 2,0.2 4 | 3,test 3,0.3 5 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
2 | 3 | [[package]] 4 | name = "backports-tarfile" 5 | version = "1.2.0" 6 | description = "Backport of CPython tarfile module" 7 | optional = false 8 | python-versions = ">=3.8" 9 | files = [ 10 | {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, 11 | {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, 12 | ] 13 | 14 | [package.extras] 15 | docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 16 | testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] 17 | 18 | [[package]] 19 | name = "certifi" 20 | version = "2024.6.2" 21 | description = "Python package for providing Mozilla's CA Bundle." 22 | optional = false 23 | python-versions = ">=3.6" 24 | files = [ 25 | {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, 26 | {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, 27 | ] 28 | 29 | [[package]] 30 | name = "cffi" 31 | version = "1.16.0" 32 | description = "Foreign Function Interface for Python calling C code." 
33 | optional = false 34 | python-versions = ">=3.8" 35 | files = [ 36 | {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, 37 | {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, 38 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, 39 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, 40 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, 41 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, 42 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, 43 | {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, 44 | {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, 45 | {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, 46 | {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, 47 | {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, 48 | {file = 
"cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, 49 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, 50 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, 51 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, 52 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, 53 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, 54 | {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, 55 | {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, 56 | {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, 57 | {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, 58 | {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, 59 | {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, 60 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, 61 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, 62 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, 63 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, 64 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, 65 | {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, 66 | {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, 67 | {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, 68 | {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, 69 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, 70 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, 71 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, 72 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, 73 | {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, 74 | {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, 75 | {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, 76 | {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, 77 | {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, 78 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, 79 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, 80 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, 81 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, 82 | {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, 83 | {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, 84 | {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, 85 | {file = 
"cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, 86 | {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, 87 | {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, 88 | ] 89 | 90 | [package.dependencies] 91 | pycparser = "*" 92 | 93 | [[package]] 94 | name = "charset-normalizer" 95 | version = "3.3.2" 96 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 97 | optional = false 98 | python-versions = ">=3.7.0" 99 | files = [ 100 | {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, 101 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, 102 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, 103 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, 104 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, 105 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, 106 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, 107 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, 108 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, 109 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, 110 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, 111 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, 112 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, 113 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, 114 | {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, 115 | {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, 116 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, 117 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, 118 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, 119 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, 120 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, 121 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, 122 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, 123 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, 124 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, 125 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, 126 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, 127 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, 128 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, 129 | {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, 130 | {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, 131 | {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, 132 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, 133 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, 134 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, 135 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, 136 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, 137 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, 138 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, 139 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, 140 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, 141 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, 142 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, 143 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, 144 | {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, 145 | {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, 146 | {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, 147 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, 148 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, 149 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, 150 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, 151 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, 152 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, 153 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, 154 | {file = 
"charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, 155 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, 156 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, 157 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, 158 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, 159 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, 160 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, 161 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, 162 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, 163 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, 164 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, 165 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, 166 | {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, 167 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, 168 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, 169 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, 170 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, 171 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, 172 | {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, 173 | {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, 174 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, 175 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, 176 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, 177 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, 178 | {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, 179 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, 180 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, 181 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, 182 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, 183 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, 184 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, 185 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, 186 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, 187 | {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, 188 | {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, 189 | {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, 190 | ] 191 | 192 | 
[[package]] 193 | name = "cryptography" 194 | version = "42.0.8" 195 | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 196 | optional = false 197 | python-versions = ">=3.7" 198 | files = [ 199 | {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, 200 | {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, 201 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, 202 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, 203 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, 204 | {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, 205 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, 206 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, 207 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, 208 | {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, 209 | {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = 
"sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, 210 | {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, 211 | {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, 212 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, 213 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, 214 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, 215 | {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, 216 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, 217 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, 218 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, 219 | {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, 220 | {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, 221 | {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, 222 | {file = 
"cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, 223 | {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, 224 | {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, 225 | {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, 226 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, 227 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, 228 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, 229 | {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, 230 | {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, 231 | ] 232 | 233 | [package.dependencies] 234 | cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} 235 | 236 | [package.extras] 237 | docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] 238 | docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] 239 | nox = ["nox"] 240 | pep8test = ["check-sdist", "click", "mypy", "ruff"] 241 | sdist = ["build"] 242 | ssh = ["bcrypt (>=3.1.5)"] 243 | test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] 244 | test-randomorder = 
["pytest-randomly"] 245 | 246 | [[package]] 247 | name = "docutils" 248 | version = "0.21.2" 249 | description = "Docutils -- Python Documentation Utilities" 250 | optional = false 251 | python-versions = ">=3.9" 252 | files = [ 253 | {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, 254 | {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, 255 | ] 256 | 257 | [[package]] 258 | name = "duckdb" 259 | version = "1.0.0" 260 | description = "DuckDB in-process database" 261 | optional = false 262 | python-versions = ">=3.7.0" 263 | files = [ 264 | {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4a8ce2d1f9e1c23b9bab3ae4ca7997e9822e21563ff8f646992663f66d050211"}, 265 | {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:19797670f20f430196e48d25d082a264b66150c264c1e8eae8e22c64c2c5f3f5"}, 266 | {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b71c342090fe117b35d866a91ad6bffce61cd6ff3e0cff4003f93fc1506da0d8"}, 267 | {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dd69f44ad212c35ae2ea736b0e643ea2b70f204b8dff483af1491b0e2a4cec"}, 268 | {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8da5f293ecb4f99daa9a9352c5fd1312a6ab02b464653a0c3a25ab7065c45d4d"}, 269 | {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3207936da9967ddbb60644ec291eb934d5819b08169bc35d08b2dedbe7068c60"}, 270 | {file = "duckdb-1.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1128d6c9c33e883b1f5df6b57c1eb46b7ab1baf2650912d77ee769aaa05111f9"}, 271 | {file = "duckdb-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:02310d263474d0ac238646677feff47190ffb82544c018b2ff732a4cb462c6ef"}, 272 | {file = 
"duckdb-1.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:75586791ab2702719c284157b65ecefe12d0cca9041da474391896ddd9aa71a4"}, 273 | {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:83bb415fc7994e641344f3489e40430ce083b78963cb1057bf714ac3a58da3ba"}, 274 | {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:bee2e0b415074e84c5a2cefd91f6b5ebeb4283e7196ba4ef65175a7cef298b57"}, 275 | {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa5a4110d2a499312609544ad0be61e85a5cdad90e5b6d75ad16b300bf075b90"}, 276 | {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fa389e6a382d4707b5f3d1bc2087895925ebb92b77e9fe3bfb23c9b98372fdc"}, 277 | {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ede6f5277dd851f1a4586b0c78dc93f6c26da45e12b23ee0e88c76519cbdbe0"}, 278 | {file = "duckdb-1.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0b88cdbc0d5c3e3d7545a341784dc6cafd90fc035f17b2f04bf1e870c68456e5"}, 279 | {file = "duckdb-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd1693cdd15375156f7fff4745debc14e5c54928589f67b87fb8eace9880c370"}, 280 | {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c65a7fe8a8ce21b985356ee3ec0c3d3b3b2234e288e64b4cfb03356dbe6e5583"}, 281 | {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:e5a8eda554379b3a43b07bad00968acc14dd3e518c9fbe8f128b484cf95e3d16"}, 282 | {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:a1b6acdd54c4a7b43bd7cb584975a1b2ff88ea1a31607a2b734b17960e7d3088"}, 283 | {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a677bb1b6a8e7cab4a19874249d8144296e6e39dae38fce66a80f26d15e670df"}, 284 | {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:752e9d412b0a2871bf615a2ede54be494c6dc289d076974eefbf3af28129c759"}, 285 | {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3aadb99d098c5e32d00dc09421bc63a47134a6a0de9d7cd6abf21780b678663c"}, 286 | {file = "duckdb-1.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83b7091d4da3e9301c4f9378833f5ffe934fb1ad2b387b439ee067b2c10c8bb0"}, 287 | {file = "duckdb-1.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:6a8058d0148b544694cb5ea331db44f6c2a00a7b03776cc4dd1470735c3d5ff7"}, 288 | {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40cb20e5ee19d44bc66ec99969af791702a049079dc5f248c33b1c56af055f4"}, 289 | {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7bce1bc0de9af9f47328e24e6e7e39da30093179b1c031897c042dd94a59c8e"}, 290 | {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8355507f7a04bc0a3666958f4414a58e06141d603e91c0fa5a7c50e49867fb6d"}, 291 | {file = "duckdb-1.0.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:39f1a46f5a45ad2886dc9b02ce5b484f437f90de66c327f86606d9ba4479d475"}, 292 | {file = "duckdb-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d29ba477b27ae41676b62c8fae8d04ee7cbe458127a44f6049888231ca58fa"}, 293 | {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:1bea713c1925918714328da76e79a1f7651b2b503511498ccf5e007a7e67d49e"}, 294 | {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:bfe67f3bcf181edbf6f918b8c963eb060e6aa26697d86590da4edc5707205450"}, 295 | {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:dbc6093a75242f002be1d96a6ace3fdf1d002c813e67baff52112e899de9292f"}, 296 | {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba1881a2b11c507cee18f8fd9ef10100be066fddaa2c20fba1f9a664245cd6d8"}, 297 | {file = 
"duckdb-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:445d0bb35087c522705c724a75f9f1c13f1eb017305b694d2686218d653c8142"}, 298 | {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:224553432e84432ffb9684f33206572477049b371ce68cc313a01e214f2fbdda"}, 299 | {file = "duckdb-1.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d3914032e47c4e76636ad986d466b63fdea65e37be8a6dfc484ed3f462c4fde4"}, 300 | {file = "duckdb-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:af9128a2eb7e1bb50cd2c2020d825fb2946fdad0a2558920cd5411d998999334"}, 301 | {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dd2659a5dbc0df0de68f617a605bf12fe4da85ba24f67c08730984a0892087e8"}, 302 | {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:ac5a4afb0bc20725e734e0b2c17e99a274de4801aff0d4e765d276b99dad6d90"}, 303 | {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c5a53bee3668d6e84c0536164589d5127b23d298e4c443d83f55e4150fafe61"}, 304 | {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b980713244d7708b25ee0a73de0c65f0e5521c47a0e907f5e1b933d79d972ef6"}, 305 | {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cbd4f9fe7b7a56eff96c3f4d6778770dd370469ca2212eddbae5dd63749db5"}, 306 | {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed228167c5d49888c5ef36f6f9cbf65011c2daf9dcb53ea8aa7a041ce567b3e4"}, 307 | {file = "duckdb-1.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46d8395fbcea7231fd5032a250b673cc99352fef349b718a23dea2c0dd2b8dec"}, 308 | {file = "duckdb-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:6ad1fc1a4d57e7616944166a5f9417bdbca1ea65c490797e3786e3a42e162d8a"}, 309 | {file = "duckdb-1.0.0.tar.gz", hash = "sha256:a2a059b77bc7d5b76ae9d88e267372deff19c291048d59450c431e166233d453"}, 310 | ] 311 | 312 | 
[[package]] 313 | name = "idna" 314 | version = "3.7" 315 | description = "Internationalized Domain Names in Applications (IDNA)" 316 | optional = false 317 | python-versions = ">=3.5" 318 | files = [ 319 | {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, 320 | {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, 321 | ] 322 | 323 | [[package]] 324 | name = "importlib-metadata" 325 | version = "7.1.0" 326 | description = "Read metadata from Python packages" 327 | optional = false 328 | python-versions = ">=3.8" 329 | files = [ 330 | {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, 331 | {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, 332 | ] 333 | 334 | [package.dependencies] 335 | zipp = ">=0.5" 336 | 337 | [package.extras] 338 | docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 339 | perf = ["ipython"] 340 | testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] 341 | 342 | [[package]] 343 | name = "jaraco-classes" 344 | version = "3.4.0" 345 | description = "Utility functions for Python class constructs" 346 | optional = false 347 | python-versions = ">=3.8" 348 | files = [ 349 | {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, 350 | {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, 351 | ] 352 | 353 | [package.dependencies] 354 | more-itertools = "*" 
355 | 356 | [package.extras] 357 | docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 358 | testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] 359 | 360 | [[package]] 361 | name = "jaraco-context" 362 | version = "5.3.0" 363 | description = "Useful decorators and context managers" 364 | optional = false 365 | python-versions = ">=3.8" 366 | files = [ 367 | {file = "jaraco.context-5.3.0-py3-none-any.whl", hash = "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266"}, 368 | {file = "jaraco.context-5.3.0.tar.gz", hash = "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"}, 369 | ] 370 | 371 | [package.dependencies] 372 | "backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} 373 | 374 | [package.extras] 375 | docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 376 | testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] 377 | 378 | [[package]] 379 | name = "jaraco-functools" 380 | version = "4.0.1" 381 | description = "Functools like those found in stdlib" 382 | optional = false 383 | python-versions = ">=3.8" 384 | files = [ 385 | {file = "jaraco.functools-4.0.1-py3-none-any.whl", hash = "sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664"}, 386 | {file = "jaraco_functools-4.0.1.tar.gz", hash = "sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8"}, 387 | ] 388 | 389 | [package.dependencies] 390 | more-itertools = "*" 391 | 392 | [package.extras] 393 | docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] 394 | testing = ["jaraco.classes", "pytest 
(>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] 395 | 396 | [[package]] 397 | name = "jeepney" 398 | version = "0.8.0" 399 | description = "Low-level, pure Python DBus protocol wrapper." 400 | optional = false 401 | python-versions = ">=3.7" 402 | files = [ 403 | {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, 404 | {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, 405 | ] 406 | 407 | [package.extras] 408 | test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] 409 | trio = ["async_generator", "trio"] 410 | 411 | [[package]] 412 | name = "keyring" 413 | version = "25.2.1" 414 | description = "Store and access your passwords safely." 415 | optional = false 416 | python-versions = ">=3.8" 417 | files = [ 418 | {file = "keyring-25.2.1-py3-none-any.whl", hash = "sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50"}, 419 | {file = "keyring-25.2.1.tar.gz", hash = "sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b"}, 420 | ] 421 | 422 | [package.dependencies] 423 | importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} 424 | "jaraco.classes" = "*" 425 | "jaraco.context" = "*" 426 | "jaraco.functools" = "*" 427 | jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} 428 | pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} 429 | SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} 430 | 431 | [package.extras] 432 | completion = ["shtab (>=1.1.0)"] 433 | docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 434 | testing = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", 
"pytest-mypy", "pytest-ruff (>=0.2.1)"] 435 | 436 | [[package]] 437 | name = "linkify-it-py" 438 | version = "2.0.3" 439 | description = "Links recognition library with FULL unicode support." 440 | optional = false 441 | python-versions = ">=3.7" 442 | files = [ 443 | {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, 444 | {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, 445 | ] 446 | 447 | [package.dependencies] 448 | uc-micro-py = "*" 449 | 450 | [package.extras] 451 | benchmark = ["pytest", "pytest-benchmark"] 452 | dev = ["black", "flake8", "isort", "pre-commit", "pyproject-flake8"] 453 | doc = ["myst-parser", "sphinx", "sphinx-book-theme"] 454 | test = ["coverage", "pytest", "pytest-cov"] 455 | 456 | [[package]] 457 | name = "markdown-it-py" 458 | version = "3.0.0" 459 | description = "Python port of markdown-it. Markdown parsing, done right!" 
460 | optional = false 461 | python-versions = ">=3.8" 462 | files = [ 463 | {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, 464 | {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, 465 | ] 466 | 467 | [package.dependencies] 468 | linkify-it-py = {version = ">=1,<3", optional = true, markers = "extra == \"linkify\""} 469 | mdit-py-plugins = {version = "*", optional = true, markers = "extra == \"plugins\""} 470 | mdurl = ">=0.1,<1.0" 471 | 472 | [package.extras] 473 | benchmarking = ["psutil", "pytest", "pytest-benchmark"] 474 | code-style = ["pre-commit (>=3.0,<4.0)"] 475 | compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] 476 | linkify = ["linkify-it-py (>=1,<3)"] 477 | plugins = ["mdit-py-plugins"] 478 | profiling = ["gprof2dot"] 479 | rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] 480 | testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] 481 | 482 | [[package]] 483 | name = "mdit-py-plugins" 484 | version = "0.4.1" 485 | description = "Collection of plugins for markdown-it-py" 486 | optional = false 487 | python-versions = ">=3.8" 488 | files = [ 489 | {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"}, 490 | {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"}, 491 | ] 492 | 493 | [package.dependencies] 494 | markdown-it-py = ">=1.0.0,<4.0.0" 495 | 496 | [package.extras] 497 | code-style = ["pre-commit"] 498 | rtd = ["myst-parser", "sphinx-book-theme"] 499 | testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] 500 | 501 | [[package]] 502 | name = "mdurl" 
503 | version = "0.1.2" 504 | description = "Markdown URL utilities" 505 | optional = false 506 | python-versions = ">=3.7" 507 | files = [ 508 | {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, 509 | {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, 510 | ] 511 | 512 | [[package]] 513 | name = "more-itertools" 514 | version = "10.2.0" 515 | description = "More routines for operating on iterables, beyond itertools" 516 | optional = false 517 | python-versions = ">=3.8" 518 | files = [ 519 | {file = "more-itertools-10.2.0.tar.gz", hash = "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"}, 520 | {file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"}, 521 | ] 522 | 523 | [[package]] 524 | name = "nh3" 525 | version = "0.2.17" 526 | description = "Python bindings to the ammonia HTML sanitization library." 
527 | optional = false 528 | python-versions = "*" 529 | files = [ 530 | {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9"}, 531 | {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a"}, 532 | {file = "nh3-0.2.17-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3"}, 533 | {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a"}, 534 | {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a"}, 535 | {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351"}, 536 | {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc"}, 537 | {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f"}, 538 | {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b"}, 539 | {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a"}, 540 | {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062"}, 541 | {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_i686.whl", hash = 
"sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71"}, 542 | {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10"}, 543 | {file = "nh3-0.2.17-cp37-abi3-win32.whl", hash = "sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911"}, 544 | {file = "nh3-0.2.17-cp37-abi3-win_amd64.whl", hash = "sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb"}, 545 | {file = "nh3-0.2.17.tar.gz", hash = "sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028"}, 546 | ] 547 | 548 | [[package]] 549 | name = "numpy" 550 | version = "1.26.4" 551 | description = "Fundamental package for array computing in Python" 552 | optional = false 553 | python-versions = ">=3.9" 554 | files = [ 555 | {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, 556 | {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, 557 | {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, 558 | {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, 559 | {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, 560 | {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, 561 | {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, 562 | {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, 563 | {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, 564 | {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, 565 | {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, 566 | {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, 567 | {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, 568 | {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, 569 | {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, 570 | {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, 571 | {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, 572 | {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, 573 | {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, 574 | {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, 575 | {file = 
"numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, 576 | {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, 577 | {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, 578 | {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, 579 | {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, 580 | {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, 581 | {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, 582 | {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, 583 | {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, 584 | {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, 585 | {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, 586 | {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, 587 | {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, 588 | {file = 
"numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, 589 | {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, 590 | {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, 591 | ] 592 | 593 | [[package]] 594 | name = "pkginfo" 595 | version = "1.11.0" 596 | description = "Query metadata from sdists / bdists / installed packages." 597 | optional = false 598 | python-versions = ">=3.8" 599 | files = [ 600 | {file = "pkginfo-1.11.0-py3-none-any.whl", hash = "sha256:6d4998d1cd42c297af72cc0eab5f5bab1d356fb8a55b828fa914173f8bc1ba05"}, 601 | {file = "pkginfo-1.11.0.tar.gz", hash = "sha256:dba885aa82e31e80d615119874384923f4e011c2a39b0c4b7104359e36cb7087"}, 602 | ] 603 | 604 | [package.extras] 605 | testing = ["pytest", "pytest-cov", "wheel"] 606 | 607 | [[package]] 608 | name = "pycparser" 609 | version = "2.22" 610 | description = "C parser in Python" 611 | optional = false 612 | python-versions = ">=3.8" 613 | files = [ 614 | {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, 615 | {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, 616 | ] 617 | 618 | [[package]] 619 | name = "pygments" 620 | version = "2.18.0" 621 | description = "Pygments is a syntax highlighting package written in Python." 
622 | optional = false 623 | python-versions = ">=3.8" 624 | files = [ 625 | {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, 626 | {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, 627 | ] 628 | 629 | [package.extras] 630 | windows-terminal = ["colorama (>=0.4.6)"] 631 | 632 | [[package]] 633 | name = "pywin32-ctypes" 634 | version = "0.2.2" 635 | description = "A (partial) reimplementation of pywin32 using ctypes/cffi" 636 | optional = false 637 | python-versions = ">=3.6" 638 | files = [ 639 | {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, 640 | {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, 641 | ] 642 | 643 | [[package]] 644 | name = "readme-renderer" 645 | version = "43.0" 646 | description = "readme_renderer is a library for rendering readme descriptions for Warehouse" 647 | optional = false 648 | python-versions = ">=3.8" 649 | files = [ 650 | {file = "readme_renderer-43.0-py3-none-any.whl", hash = "sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9"}, 651 | {file = "readme_renderer-43.0.tar.gz", hash = "sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311"}, 652 | ] 653 | 654 | [package.dependencies] 655 | docutils = ">=0.13.1" 656 | nh3 = ">=0.2.14" 657 | Pygments = ">=2.5.1" 658 | 659 | [package.extras] 660 | md = ["cmarkgfm (>=0.8.0)"] 661 | 662 | [[package]] 663 | name = "requests" 664 | version = "2.32.3" 665 | description = "Python HTTP for Humans." 
666 | optional = false 667 | python-versions = ">=3.8" 668 | files = [ 669 | {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, 670 | {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, 671 | ] 672 | 673 | [package.dependencies] 674 | certifi = ">=2017.4.17" 675 | charset-normalizer = ">=2,<4" 676 | idna = ">=2.5,<4" 677 | urllib3 = ">=1.21.1,<3" 678 | 679 | [package.extras] 680 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 681 | use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] 682 | 683 | [[package]] 684 | name = "requests-toolbelt" 685 | version = "1.0.0" 686 | description = "A utility belt for advanced users of python-requests" 687 | optional = false 688 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 689 | files = [ 690 | {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, 691 | {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, 692 | ] 693 | 694 | [package.dependencies] 695 | requests = ">=2.0.1,<3.0.0" 696 | 697 | [[package]] 698 | name = "rfc3986" 699 | version = "2.0.0" 700 | description = "Validating URI References per RFC 3986" 701 | optional = false 702 | python-versions = ">=3.7" 703 | files = [ 704 | {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, 705 | {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, 706 | ] 707 | 708 | [package.extras] 709 | idna2008 = ["idna"] 710 | 711 | [[package]] 712 | name = "rich" 713 | version = "13.7.1" 714 | description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" 715 | optional = false 716 | python-versions = ">=3.7.0" 
717 | files = [ 718 | {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, 719 | {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, 720 | ] 721 | 722 | [package.dependencies] 723 | markdown-it-py = ">=2.2.0" 724 | pygments = ">=2.13.0,<3.0.0" 725 | 726 | [package.extras] 727 | jupyter = ["ipywidgets (>=7.5.1,<9)"] 728 | 729 | [[package]] 730 | name = "secretstorage" 731 | version = "3.3.3" 732 | description = "Python bindings to FreeDesktop.org Secret Service API" 733 | optional = false 734 | python-versions = ">=3.6" 735 | files = [ 736 | {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, 737 | {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, 738 | ] 739 | 740 | [package.dependencies] 741 | cryptography = ">=2.0" 742 | jeepney = ">=0.6" 743 | 744 | [[package]] 745 | name = "textual" 746 | version = "0.65.1" 747 | description = "Modern Text User Interface framework" 748 | optional = false 749 | python-versions = "<4.0,>=3.8" 750 | files = [ 751 | {file = "textual-0.65.1-py3-none-any.whl", hash = "sha256:12a2a00f9b5675577fffa0f71171be5cdd138c42460b7ee95b3182542ee25aa0"}, 752 | {file = "textual-0.65.1.tar.gz", hash = "sha256:22b05430aa2c7f90adc38fb7458bdde9395ca90960063432ae1273510e35e682"}, 753 | ] 754 | 755 | [package.dependencies] 756 | markdown-it-py = {version = ">=2.1.0", extras = ["linkify", "plugins"]} 757 | rich = ">=13.3.3" 758 | typing-extensions = ">=4.4.0,<5.0.0" 759 | 760 | [package.extras] 761 | syntax = ["tree-sitter (>=0.20.1,<0.21.0)", "tree-sitter-languages (==1.10.2)"] 762 | 763 | [[package]] 764 | name = "tree-sitter" 765 | version = "0.22.3" 766 | description = "Python bindings to the Tree-sitter parsing library" 767 | optional = false 768 | python-versions = ">=3.9" 
769 | files = [ 770 | {file = "tree-sitter-0.22.3.tar.gz", hash = "sha256:6516bcef5d36e0365670b97c91a169c8b1aa82ea4b60946b879020820718ce3d"}, 771 | {file = "tree_sitter-0.22.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d9a26dd80cf10763527483b02ba35a0b8d9168f324dbbce3f07860256c29bf15"}, 772 | {file = "tree_sitter-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4bcbe0a7358628629d9ec8e5687477e12f7c6aae6943b0872afb7170db039b86"}, 773 | {file = "tree_sitter-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfa45e6bf2542862ce987482fe212ef3153bd331d5bba5873b9f485f8923f65a"}, 774 | {file = "tree_sitter-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4545b142da82f9668007180e0081583054682d0154cd6349796ac77dc8520d63"}, 775 | {file = "tree_sitter-0.22.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4978d22fe2868ab9a91125f49bd576ce5f954cc887c19471e0c33e104f37ba71"}, 776 | {file = "tree_sitter-0.22.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0ec593a69f8c4f1c81494147814d11b7fc6c903e5299e084ae7b89caf95cef84"}, 777 | {file = "tree_sitter-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:0f66b88b8e9993630613d594e845f3cf2695fef87d0ca1475437cb17eeb72dc5"}, 778 | {file = "tree_sitter-0.22.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e627eb129421f63378e936b5d0e13b8befa6e7c5267a8a7621a397a84e8f1f7"}, 779 | {file = "tree_sitter-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cfa2a9860bfb0404ae28a9cf056dab8f2eb7f1673d8cc9b3f7e21452daad0e0"}, 780 | {file = "tree_sitter-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a66cc5f19635119a9d8325bcb00a58ed48427e3c3d307caf7c00d745ac83a5"}, 781 | {file = "tree_sitter-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de16468ea22c910e67caa91c99be9d6eb73e97e5164480a890f678b22d32faca"}, 782 | {file = 
"tree_sitter-0.22.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:98c697427f82abab6b39cfe2ade6547d844dd419fa8cfc89031bcdf7c10579b6"}, 783 | {file = "tree_sitter-0.22.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:548aa34f15a29aef1fc8e85507f13e0678a54f1de16461f844d86179b19bb5f6"}, 784 | {file = "tree_sitter-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:2fc0e1097fb86623b340141e80a0f2b7668b09d953501d91adc715a577e32c61"}, 785 | {file = "tree_sitter-0.22.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7cb5c145fbd4bcc0cd4851dc4d0a6079a8e2f61257f8c0effc92434f6fb19b14"}, 786 | {file = "tree_sitter-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4a592080db6b9472a886f4593b4705d02630721fdbe4a700085fe775fcab20e"}, 787 | {file = "tree_sitter-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f36bf523763f05edf924126583ea997f905162046c0f184d6fd040cc1ccbf2c5"}, 788 | {file = "tree_sitter-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e1193f27c25aab299f4fc154664122c7bfe80633b726bb457356d371479a5b"}, 789 | {file = "tree_sitter-0.22.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:156df7e71a6c6b542ff29526cad6886a41115e42dc768c55101398d68325db54"}, 790 | {file = "tree_sitter-0.22.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:82e1d467ce23dd2ecc37d4fb83965e891fc37b943639c517cd5acf54a2df0ff7"}, 791 | {file = "tree_sitter-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:e541a0c08a04f229ba9479a8c441dd267fdaa3e5842ae70a744c178bcaf53fa3"}, 792 | {file = "tree_sitter-0.22.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a85a1d0fdff21cc524a959b3277c311941a9b5b91a862e462c1b55470893884a"}, 793 | {file = "tree_sitter-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f96c6acd2799bafa28543a267937eec6a3d9ccbdeb6e1d05858114d4cd882da9"}, 794 | {file = "tree_sitter-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ed2708aecd3a4c8d20a89350d3c89ac2f964985ee9117c39357cee3098a9498a"}, 795 | {file = "tree_sitter-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2f99535aa4195b20fef18559defaabd9e12fe8ed8806c101d51820f240ca64"}, 796 | {file = "tree_sitter-0.22.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:459a0f3bf8d6dbb9e9f651d67cee3a60f0b799fefd4a33f49a7e9501ada98e35"}, 797 | {file = "tree_sitter-0.22.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4a51bfe99dcd8bbfb0fe95113f0197e6e540db3077abce77a058235beec747a3"}, 798 | {file = "tree_sitter-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:8d54ef562492493bf091cb3fd605cb7e60bf1d56634a94ab48075741d823e3a5"}, 799 | ] 800 | 801 | [package.extras] 802 | docs = ["sphinx (>=7.3,<8.0)", "sphinx-book-theme"] 803 | tests = ["tree-sitter-html", "tree-sitter-javascript", "tree-sitter-json", "tree-sitter-python", "tree-sitter-rust"] 804 | 805 | [[package]] 806 | name = "twine" 807 | version = "5.1.0" 808 | description = "Collection of utilities for publishing packages on PyPI" 809 | optional = false 810 | python-versions = ">=3.8" 811 | files = [ 812 | {file = "twine-5.1.0-py3-none-any.whl", hash = "sha256:fe1d814395bfe50cfbe27783cb74efe93abeac3f66deaeb6c8390e4e92bacb43"}, 813 | {file = "twine-5.1.0.tar.gz", hash = "sha256:4d74770c88c4fcaf8134d2a6a9d863e40f08255ff7d8e2acb3cbbd57d25f6e9d"}, 814 | ] 815 | 816 | [package.dependencies] 817 | importlib-metadata = ">=3.6" 818 | keyring = ">=15.1" 819 | pkginfo = ">=1.8.1" 820 | readme-renderer = ">=35.0" 821 | requests = ">=2.20" 822 | requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" 823 | rfc3986 = ">=1.4.0" 824 | rich = ">=12.0.0" 825 | urllib3 = ">=1.26.0" 826 | 827 | [[package]] 828 | name = "typing-extensions" 829 | version = "4.12.1" 830 | description = "Backported and Experimental Type Hints for Python 3.8+" 831 | optional = false 832 | python-versions = ">=3.8" 833 | files = [ 834 | {file = "typing_extensions-4.12.1-py3-none-any.whl", hash 
= "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, 835 | {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, 836 | ] 837 | 838 | [[package]] 839 | name = "uc-micro-py" 840 | version = "1.0.3" 841 | description = "Micro subset of unicode data files for linkify-it-py projects." 842 | optional = false 843 | python-versions = ">=3.7" 844 | files = [ 845 | {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"}, 846 | {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"}, 847 | ] 848 | 849 | [package.extras] 850 | test = ["coverage", "pytest", "pytest-cov"] 851 | 852 | [[package]] 853 | name = "urllib3" 854 | version = "2.2.1" 855 | description = "HTTP library with thread-safe connection pooling, file post, and more." 856 | optional = false 857 | python-versions = ">=3.8" 858 | files = [ 859 | {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, 860 | {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, 861 | ] 862 | 863 | [package.extras] 864 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] 865 | h2 = ["h2 (>=4,<5)"] 866 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] 867 | zstd = ["zstandard (>=0.18.0)"] 868 | 869 | [[package]] 870 | name = "zipp" 871 | version = "3.19.2" 872 | description = "Backport of pathlib-compatible object wrapper for zip files" 873 | optional = false 874 | python-versions = ">=3.8" 875 | files = [ 876 | {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, 877 | {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, 878 | ] 879 | 880 | 
[package.extras] 881 | doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 882 | test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] 883 | 884 | [metadata] 885 | lock-version = "2.0" 886 | python-versions = "^3.10" 887 | content-hash = "9b602a4bff2b92dfed5dcc5e32516a72c6068892f85627dcbdc713267b107ebb" 888 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "filequery" 3 | version = "0.2.6" 4 | description = "Query CSV and Parquet files using SQL" 5 | authors = ["MarkyMan4 "] 6 | license = "MIT" 7 | readme = "README.md" 8 | 9 | [tool.poetry.dependencies] 10 | python = "^3.10" 11 | textual = "^0.65.1" 12 | duckdb = "^1.0.0" 13 | numpy = "^1.26.4" 14 | tree-sitter = "^0.22.3" 15 | 16 | [tool.poetry.group.dev.dependencies] 17 | twine = "^5.1.0" 18 | 19 | [build-system] 20 | requires = ["poetry-core"] 21 | build-backend = "poetry.core.masonry.api" 22 | 23 | [tool.poetry.scripts] 24 | filequery = "filequery:fq_cli_handler" 25 | 26 | [tool.poetry.urls] 27 | repository = "https://github.com/MarkyMan4/filequery" 28 | -------------------------------------------------------------------------------- /src/filequery/__init__.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import json 3 | import sys 4 | from typing import List 5 | 6 | import duckdb 7 | 8 | from filequery.__version__ import __version__ 9 | from filequery.file_query_args import FileQueryArgs 10 | from filequery.filedb import FileDb, FileType 11 | from filequery.tui.duckui import DuckUI 12 | 13 | 14 | def 
def parse_arguments(parser: argparse.ArgumentParser) -> FileQueryArgs:
    """
    Register filequery's CLI options on the parser, parse the command line
    and return the resulting arguments.

    If a config file is given via -c/--config, it becomes the single source
    of arguments and everything else on the command line is ignored.

    :param parser: argument parser to register the options on
    :type parser: argparse.ArgumentParser
    :return: parsed CLI arguments
    :rtype: FileQueryArgs
    """
    parser.add_argument(
        "-f", "--filename", required=False, help="path to a CSV, Parquet or JSON file"
    )
    parser.add_argument(
        "-d",
        "--filesdir",
        required=False,
        help="path to a directory which can contain a combination of CSV, Parquet and JSON files",
    )
    parser.add_argument(
        "-q", "--query", required=False, help="SQL query to execute against file"
    )
    parser.add_argument(
        "-Q", "--query_file", required=False, help="path to file with query to execute"
    )
    parser.add_argument(
        "-o",
        "--out_file",
        nargs="+",
        required=False,
        help="file to write results to instead of printing to standard output",
    )
    parser.add_argument(
        "-F",
        "--out_file_format",
        required=False,
        help="either csv or parquet, defaults to csv",
    )
    parser.add_argument(
        "-D",
        "--delimiter",
        required=False,
        help="delimiter to use when printing result or writing to CSV file",
    )
    parser.add_argument(
        "-c", "--config", required=False, help="path to JSON config file"
    )
    parser.add_argument(
        "-e",
        "--editor",
        required=False,
        help="run SQL editor UI for exploring data",
        action="store_true",
    )
    parser.add_argument("-v", "--version", action="version", version=__version__)
    args = parser.parse_args()

    # if config file given, all other arguments are ignored
    if args.config:
        try:
            return parse_config_file(args.config)
        except Exception as e:
            # report the underlying cause (missing file, malformed JSON, ...)
            # instead of failing silently with a generic message
            print("failed to load config file")
            print(e)
            sys.exit()

    return FileQueryArgs(
        args.filename,
        args.filesdir,
        args.query,
        args.query_file,
        args.out_file,
        args.out_file_format,
        args.delimiter,
        args.editor,
    )


def parse_config_file(config_file: str) -> FileQueryArgs:
    """
    Load CLI arguments from a JSON config file.

    :param config_file: path to the JSON config file
    :type config_file: str
    :return: arguments read from the config file (editor mode is always off)
    :rtype: FileQueryArgs
    """
    with open(config_file) as cf:
        config = json.load(cf)

    # need to convert out_file to a list if a single out file is specified
    outfiles = config.get("out_file")
    if outfiles and isinstance(outfiles, str):
        outfiles = [outfiles]

    return FileQueryArgs(
        filename=config.get("filename"),
        filesdir=config.get("filesdir"),
        query=config.get("query"),
        query_file=config.get("query_file"),
        out_file=outfiles,
        out_file_format=config.get("out_file_format"),
        delimiter=config.get("delimiter"),
        editor=False,
    )


def validate_args(args: FileQueryArgs) -> str:
    """
    Check that the combination of provided arguments makes sense.

    :param args: parsed CLI arguments
    :type args: FileQueryArgs
    :return: an error message describing the problem, or None if args are valid
    :rtype: str
    """
    # if using editor, other args are optional
    if args.editor:
        return None

    err_msg = None

    if not args.filename and not args.filesdir:
        err_msg = "you must provide either a file name or a path to a directory containing CSV and/or Parquet files"

    if args.filename and args.filesdir:
        err_msg = "you cannot provide both filename and filesdir"

    if not args.query and not args.query_file:
        err_msg = "you must provide either a query or a path to a file with a query"

    if args.query and args.query_file:
        err_msg = "you cannot provide both query and query_file"

    return err_msg


def split_queries(sql: str) -> List[str]:
    """
    Split semicolon separated SQL to a list

    Blank statements (from trailing or doubled semicolons) are dropped so
    every returned entry contains executable SQL.

    :param sql: SQL to split
    :type sql: str
    :return: List of SQL statements
    :rtype: List[str]
    """
    return [query for query in sql.split(";") if query.strip()]


def run_sql(fdb: FileDb, queries: List[str]):
    """
    Execute the given queries against the database and yield each result.

    :param fdb: database created from the input file(s)
    :type fdb: FileDb
    :param queries: SQL statements to execute
    :type queries: List[str]
    """
    if len(queries) > 1:
        yield from fdb.exec_many_queries(queries)
    else:
        yield fdb.exec_query(queries[0])


def get_query_list(args: FileQueryArgs) -> List[str]:
    """
    Read the SQL to run (from --query or --query_file) and split it into
    individual statements.

    :param args: parsed CLI arguments
    :type args: FileQueryArgs
    :return: list of SQL statements to run
    :rtype: List[str]
    """
    query = args.query

    if args.query_file:
        with open(args.query_file) as f:
            query = f.read()

    return split_queries(query)


# determines what to do based on arguments provided
# having this separate from fq_cli_handler() makes unit testing easier
def handle_args(args: FileQueryArgs):
    """
    Dispatch on the parsed arguments: run the editor, export query results
    to files, or print results to stdout.

    :param args: parsed and validated CLI arguments
    :type args: FileQueryArgs
    """
    # if using editor and no files specified, run DuckUI with an empty database
    if args.editor and not args.filename and not args.filesdir:
        ui = DuckUI(conn=duckdb.connect(":memory:"))
        ui.run()
        return

    try:
        filepath = args.filename if args.filename else args.filesdir
        fdb = FileDb(filepath)
    except Exception as e:
        print("failed to load files")
        print(e)
        sys.exit()

    # if editor mode, run the editor and return afterwards
    if args.editor:
        ui = DuckUI(conn=fdb.db)
        ui.run()
        return

    try:
        queries = get_query_list(args)
    except Exception as e:
        print("failed to read query")
        print(e)
        sys.exit()

    if args.out_file:
        if len(args.out_file) != len(queries):
            print("number of queries and output files do not match")
            sys.exit()

        outfile_type = (
            FileType.PARQUET if args.out_file_format == "parquet" else FileType.CSV
        )

        # delimiter is the same for every export, so resolve it once
        delimiter = args.delimiter if args.delimiter else ","

        for query, out_file in zip(queries, args.out_file):
            fdb.export_query(query, out_file, outfile_type, delimiter=delimiter)
    else:
        for query_result in run_sql(fdb, queries):
            query_result.format_as_table(args.delimiter)


def fq_cli_handler():
    """CLI entry point: parse arguments, validate them, then run handle_args()."""
    parser = argparse.ArgumentParser()
    args = parse_arguments(parser)
    err = validate_args(args)

    if err:
        print(f"{err}\n")
        parser.print_help()
        sys.exit()

    handle_args(args)
# mapping from FileType to the DuckDB table function used to read that format
READ_FUNCS = {
    FileType.CSV: "read_csv",
    FileType.PARQUET: "read_parquet",
    FileType.JSON: "read_json_auto",
    FileType.NDJSON: "read_ndjson_auto",
}

# mapping from file extension to FileType
FILE_EXT_MAP = {
    "csv": FileType.CSV,
    "parquet": FileType.PARQUET,
    "json": FileType.JSON,
    "ndjson": FileType.NDJSON,
}


class FileDb:
    def __init__(self, filepath: str):
        """
        FileDb constructor

        :param filepath: path to a file or directory containing files which will be read into tables
        :type filepath: str
        """
        self.db = duckdb.connect(":memory:")

        if os.path.isdir(filepath):
            # only take accepted file types; derive the extensions from
            # FILE_EXT_MAP so the filter can never drift out of sync with it
            accepted_exts = tuple(f".{ext}" for ext in FILE_EXT_MAP)

            for file in os.listdir(filepath):
                if file.lower().endswith(accepted_exts):
                    self._create_table_from_file(os.path.join(filepath, file))
        else:
            self._create_table_from_file(filepath)

    def _create_table_from_file(self, filepath: str):
        """
        create a table in the database from a file

        :param filepath: path to a CSV, JSON, NDJSON or Parquet file
        :type filepath: str
        :raises InvalidFileTypeException: raised if file is not CSV, JSON, NDJSON or Parquet
        """
        base_filename = os.path.basename(filepath).lower()
        table_name, file_ext = os.path.splitext(base_filename)
        file_ext = file_ext.replace(".", "")
        filetype = FILE_EXT_MAP.get(file_ext)

        if filetype is None:
            # raise an instance, not the class: the constructor requires an
            # argument, so raising the bare class would fail with a TypeError
            raise InvalidFileTypeException(file_ext)

        read_func = READ_FUNCS[filetype]

        if self._should_quote_table_name(table_name):
            table_name = f'"{table_name}"'

        # escape single quotes so a quote in the file path cannot terminate
        # the SQL string literal early
        escaped_path = filepath.replace("'", "''")

        # for csv, json and ndjson, set sample size to -1 (sample all records)
        # this is not needed for parquet
        if filetype == FileType.PARQUET:
            self.db.execute(
                f"create table {table_name} as select * from {read_func}('{escaped_path}');"
            )
        else:
            self.db.execute(
                f"create table {table_name} as select * from {read_func}('{escaped_path}', SAMPLE_SIZE=-1);"
            )

    def _should_quote_table_name(self, table_name: str) -> bool:
        """
        Determine if a table name needs to be wrapped in double quotes. It needs to be wrapped
        in quotes if any of the following is true:
        - it starts with a number or special character
        - it contains whitespace or special characters
        - it is a reserved word

        :param table_name: name of table to check - this is the file name without the extension
        :type table_name: str
        :return: whether the table name needs to be wrapped in double quotes
        :rtype: bool
        """
        # first check if it starts with a number or special character, or if it contains special characters
        valid_table_name_regex = re.compile(r"^[a-zA-Z_]+[a-zA-Z0-9_]+$")
        if not valid_table_name_regex.match(table_name):
            return True

        # then check if it's a reserved word - using DuckDB's function duckdb_keywords();
        # pass the name as a prepared-statement parameter so it cannot break the SQL
        res = self.db.execute(
            "select * from duckdb_keywords() where keyword_name = ?",
            [table_name.lower()],
        )

        return len(res.fetchall()) > 0

    def exec_query(self, query: str) -> QueryResult:
        """
        Executes a query in the database created from the file. If more than one semicolon separated queries are given,
        the result will only be given for the last one. Use the exec_many_queries() to get the result from multiple queries.

        :param query: query to execute
        :type query: str
        :return: result of executing the query
        :rtype: QueryResult
        """
        res = self.db.execute(query)
        return QueryResult(res.fetchnumpy())

    def exec_many_queries(self, queries: List[str]) -> List[QueryResult]:
        """
        Execute each query in order and collect all results.

        :param queries: list of SQL statements to execute
        :type queries: List[str]
        :return: one QueryResult per query, in order
        :rtype: List[QueryResult]
        """
        return [self.exec_query(query) for query in queries]

    def export_query(
        self, query: str, output_filepath: str, filetype: FileType = FileType.CSV, **kwargs
    ):
        """
        Writes query result to a file

        :param query: query to execute
        :type query: str
        :param output_filepath: path to output file
        :type output_filepath: str
        :param filetype: output file format (FileType.CSV, FileType.JSON or FileType.PARQUET), defaults to FileType.CSV
        :type filetype: FileType
        """
        # escape single quotes so the output path cannot break the SQL literal
        escaped_out = output_filepath.replace("'", "''")

        if filetype == FileType.CSV:
            delimiter = kwargs.get("delimiter", ",")
            self.db.execute(
                f"copy ({query}) to '{escaped_out}' (header, delimiter '{delimiter}')"
            )
        elif filetype == FileType.JSON:
            self.db.execute(f"copy ({query}) to '{escaped_out}' (ARRAY true)")
        elif filetype == FileType.PARQUET:
            self.db.execute(f"copy ({query}) to '{escaped_out}'")
class QueryResult:
    """Holds a query result as column metadata plus a list of row records."""

    def __init__(self, result: Dict[str, np.ndarray]):
        """
        QueryResult constructor

        :param result: mapping of column name to a numpy array of that column's
            values, e.g. the output of DuckDB's fetchnumpy()
        :type result: Dict[str, np.ndarray]
        """
        # result_cols is a dict with keys as column names and values as data type
        self.result_cols = {col: result[col].dtype for col in result}

        # transpose the column-major arrays into row-major python lists
        recs_as_cols = [result[col] for col in result]
        self.records = np.transpose(recs_as_cols).tolist()

    def __format_field(self, field) -> str:
        """Format a single value for delimited output: quote strings, blank out None."""
        if isinstance(field, str):
            return f'"{field}"'

        # identity check: None should render as an empty field
        if field is None:
            return ""

        return str(field)

    def __str__(self) -> str:
        # formats as a csv
        return self.format_with_delimiter(",")

    @property
    def dict_records(self) -> List[Dict[str, Any]]:
        """Result rows as a list of {column name: value} dictionaries."""
        return [dict(zip(self.result_cols, rec)) for rec in self.records]

    def format_with_delimiter(self, delimiter):
        """
        Render the result as delimited text: a header row followed by one
        line per record.

        :param delimiter: field separator to use
        :type delimiter: str
        :return: delimited representation of the result
        :rtype: str
        """
        col_names = list(self.result_cols.keys())
        header_str = delimiter.join(map(self.__format_field, col_names))
        records_str = "\n".join(
            [delimiter.join(map(self.__format_field, rec)) for rec in self.records]
        )

        return f"{header_str}\n{records_str}"

    def format_as_table(self, delimiter: str = None):
        """
        Formats and prints the query result in a tabular format

        :param delimiter: specify a delimiter to format like a delimited file, if not specified, the result will be printed in a "pretty" format, defaults to None
        :type delimiter: str, optional
        """
        if delimiter:
            print(self.format_with_delimiter(delimiter))
        else:
            # otherwise create a table using rich
            table = Table()
            for col in self.result_cols:
                # right-align numeric columns, left-align everything else
                justify = "left" if self.result_cols[col] == "object" else "right"
                table.add_column(col, justify=justify)

            for rec in self.records:
                # escape so values containing rich markup render literally
                stringified = [markup.escape(str(r)) for r in rec]
                table.add_row(*stringified)

            console = Console()
            console.print(table)

    def save_to_file(self, filepath: str, delimiter: str = ","):
        """
        Saves query result as a CSV

        :param filepath: path to output file
        :type filepath: str
        :param delimiter: delimiter to use in output file, defaults to ','
        :type delimiter: str
        """
        with open(filepath, "w") as outfile:
            outfile.write(self.format_with_delimiter(delimiter))
self.conn = duckdb.connect(":memory:") 35 | 36 | # mapping from tab ID to editor content, tab IDs are "tab-1", "tab-2" and so on 37 | self.tab_content = defaultdict(str) 38 | 39 | # keep track of last query ran, so if user exports result, can use a duckdb copy statement 40 | self.last_query = "" 41 | 42 | super().__init__() 43 | 44 | def _get_table_list(self) -> List[str]: 45 | """ 46 | get the list of tables in the database 47 | 48 | :return: list of tables currently in the database 49 | :rtype: List[str] 50 | """ 51 | cur = self.conn.cursor() 52 | tables = [] 53 | cur.execute("show all tables") 54 | 55 | for rec in cur.fetchall(): 56 | tables.append(rec[2]) # third column is table name 57 | 58 | cur.close() 59 | 60 | return tables 61 | 62 | def _refresh_table_tree(self): 63 | # refreshing causes the tree to get recreated, which makes all nodes collapsed 64 | # keep track of what is expanded right now and expand them again after recreation 65 | nodes_to_expand = [] 66 | 67 | for child in self.tables.root.children: 68 | if child.is_expanded: 69 | nodes_to_expand.append(str(child.label)) 70 | 71 | self.tables.root.remove_children() 72 | cur = self.conn.cursor() 73 | 74 | for table in self._get_table_list(): 75 | table_node = self.tables.root.add(table) 76 | 77 | cur.execute(f"describe table {table}") 78 | 79 | for rec in cur.fetchall(): 80 | table_node.add_leaf(f"{rec[0]}: {rec[1]}") 81 | 82 | # expand the table_node if it was expanded before recreation 83 | if table in nodes_to_expand: 84 | table_node.expand() 85 | 86 | cur.close() 87 | 88 | def compose(self) -> ComposeResult: 89 | self.tables = Tree("tables", classes="table-browser-area") 90 | self.tables.root.expand() 91 | self._refresh_table_tree() 92 | 93 | self.text_area = TextArea( 94 | language="sql", classes="editor-box", theme="dracula", id="editor" 95 | ) 96 | self.text_area.focus() 97 | 98 | self.result_table = DataTable(classes="result-box") 99 | self.result_table.zebra_stripes = True 100 | 101 | 
self.help_box = Markdown(help_md, classes="popup-box") 102 | self.save_sql_input = Input( 103 | placeholder="sql file name...", 104 | classes="file-name-input", 105 | id="sql-file-input", 106 | ) 107 | self.save_result_input = Input( 108 | placeholder="result file name...", 109 | classes="file-name-input", 110 | id="result-file-input", 111 | ) 112 | 113 | self.tabs = Tabs(Tab("tab 1")) 114 | 115 | yield Horizontal( 116 | Vertical( 117 | self.tables, 118 | ), 119 | Vertical( 120 | self.tabs, 121 | self.text_area, 122 | self.result_table, 123 | classes="editor-area", 124 | ), 125 | ) 126 | 127 | yield self.help_box 128 | yield self.save_sql_input 129 | yield self.save_result_input 130 | 131 | yield Footer() 132 | 133 | @on(Input.Submitted, selector="#sql-file-input") 134 | def handle_sql_file_name_input(self): 135 | try: 136 | with open(self.save_sql_input.value, "w") as f: 137 | f.write(self.text_area.text) 138 | except: 139 | # ignore for now, find a way to display an error message 140 | pass 141 | 142 | # after submit, hide this dialog and refocus on text editor 143 | self.save_sql_input.display = False 144 | self.text_area.focus() 145 | 146 | @on(Input.Submitted, selector="#result-file-input") 147 | def handle_result_file_name_input(self): 148 | try: 149 | self.conn.execute(f"copy ({self.last_query}) to '{self.save_result_input.value}' (header)") 150 | except: 151 | # ignore for now, find a way to display an error message 152 | pass 153 | 154 | # after submit, hide this dialog and refocus on text editor 155 | self.save_result_input.display = False 156 | self.text_area.focus() 157 | 158 | @on(TextArea.Changed, selector="#editor") 159 | def handle_editor_content_changed(self): 160 | cur_tab = self.tabs.active_tab.id 161 | self.tab_content[cur_tab] = self.text_area.text 162 | 163 | @on(Tabs.TabActivated) 164 | def handle_tab_activated(self, event: Tabs.TabActivated): 165 | self.text_area.text = self.tab_content[event.tab.id] 166 | 
self.result_table.clear(columns=True) 167 | 168 | def on_descendant_focus(self, event: events.DescendantFocus): 169 | if type(event.widget) == DataTable: 170 | self.result_table.add_class("focused") 171 | self.text_area.remove_class("focused") 172 | self.tables.remove_class("focused") 173 | if type(event.widget) == TextArea: 174 | self.text_area.add_class("focused") 175 | self.result_table.remove_class("focused") 176 | self.tables.remove_class("focused") 177 | if type(event.widget) == Tabs: 178 | self.result_table.remove_class("focused") 179 | self.text_area.remove_class("focused") 180 | self.tables.remove_class("focused") 181 | if type(event.widget) == Tree: 182 | self.tables.add_class("focused") 183 | self.result_table.remove_class("focused") 184 | self.text_area.remove_class("focused") 185 | 186 | # handle key events outside of bindings 187 | async def on_key(self, event: events.Key): 188 | if event.key == "ctrl+shift+up": 189 | if self.text_area.has_focus: 190 | self.tabs.focus() 191 | elif self.result_table.has_focus: 192 | self.text_area.focus() 193 | elif event.key == "ctrl+shift+down": 194 | if self.tabs.has_focus: 195 | self.text_area.focus() 196 | elif self.text_area.has_focus: 197 | self.result_table.focus() 198 | elif event.key == "ctrl+shift+left": 199 | self.tables.focus() 200 | elif event.key == "ctrl+shift+right": 201 | self.text_area.focus() 202 | elif event.key == "ctrl+n": 203 | await self.action_new_tab() 204 | elif event.key == "ctrl+t": 205 | await self.action_close_tab() 206 | 207 | def handle_menu_event(self, event: MenuEvent): 208 | if event == MenuEvent.SAVE_SQL: 209 | self.save_sql_input.display = True 210 | self.save_sql_input.focus() 211 | elif event == MenuEvent.LOAD_SQL: 212 | self.push_screen(FileBrowser(), callback=self.handle_file_browser_event) 213 | elif event == MenuEvent.SAVE_RESULT: 214 | self.save_result_input.display = True 215 | self.save_result_input.focus() 216 | elif event == MenuEvent.EXIT: 217 | self.exit() 218 | 219 | 
def handle_file_browser_event(self, path: Path): 220 | if path is None: 221 | return 222 | 223 | with open(path) as file: 224 | self.text_area.text = file.read() 225 | 226 | # manually call this to ensure tab content is saved 227 | self.handle_editor_content_changed() 228 | 229 | def action_toggle_menu(self): 230 | self.push_screen(MenuModal(), callback=self.handle_menu_event) 231 | 232 | def action_close_dialog(self): 233 | # close help and file name inputs and refocus on editor 234 | self.help_box.display = False 235 | self.save_sql_input.display = False 236 | self.save_result_input.display = False 237 | self.text_area.focus() 238 | 239 | def action_toggle_help(self): 240 | self.help_box.display = not self.help_box.display 241 | 242 | async def action_new_tab(self): 243 | # find the max tab ID and add one to get the next tab ID 244 | # tab IDs are tab-, so split on "-" and take the second element to get the ID 245 | tab_ids = [int(tab_id.split("-")[1]) for tab_id in self.tab_content.keys()] 246 | next_id = max(tab_ids) + 1 247 | await self.tabs.add_tab( 248 | Tab(f"tab {next_id}", id=f"tab-{next_id}"), after=self.tabs.active_tab 249 | ) 250 | self.tabs.action_next_tab() 251 | 252 | async def action_close_tab(self): 253 | # don't allow closing if only one tab open 254 | if self.tabs.tab_count == 1: 255 | return 256 | 257 | active_tab_id = self.tabs.active_tab.id 258 | await self.tabs.remove_tab(active_tab_id) 259 | del self.tab_content[active_tab_id] 260 | self.tabs.action_previous_tab() 261 | 262 | def _display_error_in_table(self, error_msg: str): 263 | """ 264 | Displays an error message in the result table 265 | 266 | :param error_msg: error message to display 267 | :type error: str 268 | """ 269 | self.result_table.clear(columns=True) 270 | self.result_table.add_column("error") 271 | self.result_table.add_row(error_msg) 272 | 273 | def _find_query_at_cursor( 274 | self, cursor_x: int, cursor_y: int 275 | ) -> Tuple[str, Selection]: 276 | """ 277 | Find the 
query at cursor_x, cursor_y 278 | 279 | :param cursor_x: line cursor is on 280 | :type cursor_x: int 281 | :param cursor_y: column cursor is on 282 | :type cursor_y: int 283 | :return: tuple with the query selection and the span of the query given as a Selection which 284 | can be used to highlight the query 285 | :rtype: Tuple[str, Selection] 286 | """ 287 | # remove comments 288 | text = re.sub("--.+", "", self.text_area.text) 289 | queries = text.split(";") 290 | 291 | # add character to the end of each query so that query is selected if cursor 292 | # is right before semicolon 293 | queries = [q + " " for q in queries] 294 | 295 | line = 0 296 | col = 0 297 | query_idx_to_run = -1 298 | found_query = False 299 | selection_start = (0, 0) 300 | selection_end = (0, 0) 301 | 302 | for i, query in enumerate(queries): 303 | selection_start = (line, col) 304 | 305 | # whether we've seen any characters in the current query besides newline and space 306 | found_non_empty_chars = False 307 | 308 | for c in query: 309 | if line == cursor_x and col == cursor_y: 310 | query_idx_to_run = i 311 | found_query = True 312 | 313 | if c == "\n": 314 | col = 0 315 | line += 1 316 | else: 317 | col += 1 318 | if c != " ": 319 | found_non_empty_chars = True 320 | 321 | # if we've only seen newlines and whitespace, keep incrementing selection start 322 | # this way the highlighted portion actually starts at the code 323 | if (c == " " or c == "\n") and not found_non_empty_chars: 324 | selection_start = (line, col) 325 | 326 | selection_end = (line, col) 327 | 328 | if found_query: 329 | break 330 | 331 | query = queries[query_idx_to_run] if query_idx_to_run != -1 else "" 332 | 333 | return query, Selection(selection_start, selection_end) 334 | 335 | def action_execute_query(self): 336 | """ 337 | Executes the query at the cursor 338 | """ 339 | cursor_x, cursor_y = self.text_area.cursor_location 340 | query, selection = self._find_query_at_cursor(cursor_x, cursor_y) 341 | 342 | if 
query.strip() == "": 343 | self._display_error_in_table("no query to run") 344 | return 345 | 346 | self.text_area.selection = selection 347 | 348 | result = None 349 | cur = self.conn.cursor() 350 | 351 | try: 352 | cur.execute(query) 353 | result = cur.fetchall() 354 | self.last_query = query 355 | except Exception as e: 356 | self._display_error_in_table(str(e)) 357 | cur.close() 358 | return 359 | 360 | try: 361 | col_names = [col[0] for col in cur.description] 362 | self.result_table.clear(columns=True) 363 | self.result_table.add_columns(*col_names) 364 | self.result_table.add_rows(result) 365 | except Exception as e: 366 | self._display_error_in_table(str(e)) 367 | finally: 368 | cur.close() 369 | 370 | # after executing a statement, update the table list in case any tables were created or dropped 371 | self._refresh_table_tree() 372 | -------------------------------------------------------------------------------- /src/filequery/tui/help_content.py: -------------------------------------------------------------------------------- 1 | help_md = """\ 2 | # Help 3 | 4 | controls 5 | 6 | |key|action| 7 | |---|------| 8 | |ctrl+c|quit| 9 | |f2|toggle help screen| 10 | |f9|execute SQL in the editor| 11 | |ctrl+q|save editor content| 12 | |ctrl+r|save result| 13 | |ctrl+p|close all open dialogs (help screen, save file dialogs)| 14 | |ctrl+n|open a new tab| 15 | |ctrl+shift+arrow keys|navigate panes| 16 | |left or right arrow|switch tabs (must have the tab section focused to use this, i.e. 
press `ctrl+shift+up` until the tab section at the top of the screen is focused)| 17 | 18 | --- 19 | 20 | editor controls 21 | 22 | |key|action| 23 | |---|------| 24 | |escape|focus on the next item| 25 | |up|move the cursor up| 26 | |down|move the cursor down| 27 | |left|move the cursor left| 28 | |ctrl+left|move the cursor to the start of the word| 29 | |ctrl+shift+left|move the cursor to the start of the word and select| 30 | |right|move the cursor right| 31 | |ctrl+right|move the cursor to the end of the word| 32 | |ctrl+shift+right|move the cursor to the end of the word and select| 33 | |home,ctrl+a|move the cursor to the start of the line| 34 | |end,ctrl+e|move the cursor to the end of the line| 35 | |shift+home|move the cursor to the start of the line and select| 36 | |shift+end|move the cursor to the end of the line and select| 37 | |pageup|move the cursor one page up| 38 | |pagedown|move the cursor one page down| 39 | |shift+up|select while moving the cursor up| 40 | |shift+down|select while moving the cursor down| 41 | |shift+left|select while moving the cursor left| 42 | |shift+right|select while moving the cursor right| 43 | |backspace|delete character to the left of cursor| 44 | |ctrl+w|delete from cursor to start of the word| 45 | |delete,ctrl+d|delete character to the right of cursor| 46 | |ctrl+f|delete from cursor to end of the word| 47 | |ctrl+x|delete the current line| 48 | |ctrl+u|delete from cursor to the start of the line| 49 | |ctrl+k|delete from cursor to the end of the line| 50 | |f6|select the current line| 51 | |f7|select all text in the document| 52 | 53 | --- 54 | 55 | When you navigate to the table list on the left side, press `space` or `enter` to expand/collapse table columns. 
class FileBrowser(ModalScreen):
    """Modal screen for picking a file to load into the editor.

    Dismisses with the selected file's path, or with None when closed via escape.
    """

    BINDINGS = [
        Binding("escape", "exit")
    ]

    def compose(self):
        # simple vertical layout: title, hint, separator, then a directory
        # tree rooted at the current working directory
        with Container():
            yield Vertical(
                Static("Select file to load", id="menu-title"),
                Static("(esc to close this)", classes="centered"),
                Rule(),
                DirectoryTree("./"),
            )

    def action_exit(self):
        # dismiss with no result; callers treat None as "cancelled"
        self.dismiss()

    @on(DirectoryTree.FileSelected)
    def handle_file_selected(self, event: DirectoryTree.FileSelected):
        # hand the chosen path back to the caller via the screen callback
        self.dismiss(event.path)
classes="menu-btn"), 23 | Button("save query result", id="save-result-btn", classes="menu-btn"), 24 | Button("close menu", id="close-btn", classes="menu-btn"), 25 | Button("exit filequery", id="exit-btn", classes="menu-btn menu-exit-btn"), 26 | ) 27 | 28 | @on(Button.Pressed) 29 | def handle_button(self, event: Button.Pressed): 30 | if event.button.id == "load-sql-btn": 31 | self.dismiss(MenuEvent.LOAD_SQL) 32 | elif event.button.id == "save-sql-btn": 33 | self.dismiss(MenuEvent.SAVE_SQL) 34 | elif event.button.id == "save-result-btn": 35 | self.dismiss(MenuEvent.SAVE_RESULT) 36 | elif event.button.id == "close-btn": 37 | self.dismiss() 38 | elif event.button.id == "exit-btn": 39 | self.dismiss(MenuEvent.EXIT) 40 | -------------------------------------------------------------------------------- /src/filequery/tui/screens/menu_events.py: -------------------------------------------------------------------------------- 1 | from enum import Enum, auto 2 | 3 | 4 | class MenuEvent(Enum): 5 | CLOSE = auto() 6 | LOAD_SQL = auto() 7 | SAVE_SQL = auto() 8 | SAVE_RESULT = auto() 9 | EXIT = auto() 10 | -------------------------------------------------------------------------------- /src/filequery/tui/styles/style.tcss: -------------------------------------------------------------------------------- 1 | Screen { 2 | layout: vertical; 3 | layers: below above top; 4 | } 5 | 6 | .editor-box { 7 | height: 1fr; 8 | border: round green; 9 | } 10 | 11 | .result-box { 12 | height: 1fr; 13 | border: round green; 14 | } 15 | 16 | .editor-area { 17 | width: 3fr; 18 | } 19 | 20 | .table-browser-area { 21 | width: 1fr; 22 | border: round green; 23 | } 24 | 25 | .focused { 26 | border: ascii yellow; 27 | } 28 | 29 | .title { 30 | text-style: bold; 31 | } 32 | 33 | .popup-box { 34 | layer: top; 35 | margin-left: 20; 36 | margin-right: 20; 37 | margin-top: 5; 38 | height: 75%; 39 | border: solid rgb(101, 6, 165); 40 | overflow-y: scroll; 41 | display: none; 42 | padding: 1; 43 | } 44 | 45 | 
.file-name-input { 46 | layer: above; 47 | margin-left: 20; 48 | margin-right: 20; 49 | margin-top: 5; 50 | border: solid rgb(101, 6, 165); 51 | display: none; 52 | } 53 | 54 | ModalScreen { 55 | align: center middle; 56 | } 57 | 58 | ModalScreen > Container { 59 | width: 40%; 60 | height: 50%; 61 | align: center middle; 62 | padding: 1 2; 63 | background: $panel; 64 | border: solid green; 65 | overflow-y: scroll; 66 | } 67 | 68 | ModalScreen > Container > Vertical { 69 | height: auto; 70 | width: auto; 71 | } 72 | 73 | .centered { 74 | text-align: center; 75 | } 76 | 77 | #menu-title { 78 | text-align: center; 79 | text-style: bold; 80 | } 81 | 82 | .menu-btn { 83 | margin-top: 1; 84 | border: ascii green; 85 | text-style: none; 86 | } 87 | 88 | .menu-btn:hover { 89 | border: ascii yellow; 90 | } 91 | 92 | .menu-btn:focus { 93 | border: ascii yellow; 94 | } 95 | 96 | .menu-exit-btn:focus { 97 | border: ascii red; 98 | } 99 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MarkyMan4/filequery/c6233d836393517b2b2ac7f2cf5f2be788724a33/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_filequery.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import unittest 4 | 5 | # add src folder to path so filequery can be imported 6 | src_path = os.path.join(os.getcwd(), "src") 7 | sys.path.append(src_path) 8 | 9 | # add sample data dir to path 10 | sample_data_path = os.path.join(os.getcwd(), "example") 11 | sys.path.append(sample_data_path) 12 | 13 | from filequery import handle_args, validate_args 14 | from filequery.file_query_args import FileQueryArgs 15 | from filequery.filedb import FileDb, FileType 16 | from filequery.queryresult import QueryResult 17 | 18 | 19 | class 
TestFileQuery(unittest.TestCase): 20 | def check_select_star_from_test(self, res: QueryResult): 21 | self.assertEqual(len(res.records), 3) 22 | 23 | for rec in res.records: 24 | self.assertEqual(len(rec), 3) 25 | 26 | def test_exec_query(self): 27 | fdb = FileDb("example/test.csv") 28 | res = fdb.exec_query("select * from test") 29 | 30 | self.check_select_star_from_test(res) 31 | 32 | def test_filesdir_no_trailing_slash(self): 33 | fdb = FileDb("example/data") 34 | res = fdb.exec_query("select * from test") 35 | 36 | self.check_select_star_from_test(res) 37 | 38 | def test_filesdir_trailing_slash(self): 39 | fdb = FileDb("example/data/") 40 | res = fdb.exec_query("select * from test") 41 | 42 | self.check_select_star_from_test(res) 43 | 44 | def test_multi_query(self): 45 | fdb = FileDb("example/data/") 46 | res = fdb.exec_many_queries( 47 | [ 48 | "select * from test;", 49 | "select * from test1;", 50 | """ 51 | select * 52 | from test t1 53 | inner join test1 t2 54 | on t1.col1 = t2.col1; 55 | """, 56 | ] 57 | ) 58 | 59 | self.assertEqual(len(res), 3) 60 | 61 | def test_select_star_json(self): 62 | fdb = FileDb("example/json_test.json") 63 | res = fdb.exec_query("select * from json_test") 64 | 65 | self.assertEqual(res.records[0][0], 1) 66 | self.assertEqual(res.records[0][1], "test 1") 67 | self.assertEqual(res.records[0][2], 1.0) 68 | 69 | def test_select_struct_field(self): 70 | fdb = FileDb("example/json_test.json") 71 | res = fdb.exec_query("select nested.nest_id, nested.nest_val from json_test") 72 | 73 | self.assertEqual(res.records[0][0], 2) 74 | self.assertEqual(res.records[0][1], "nested test 1") 75 | 76 | def test_select_list_field(self): 77 | fdb = FileDb("example/json_test.json") 78 | res = fdb.exec_query("select list[1], list[2], list[3], list[4] from json_test") 79 | 80 | self.assertEqual(res.records[0][0], 1) 81 | self.assertEqual(res.records[0][1], 2) 82 | self.assertEqual(res.records[0][2], 3) 83 | self.assertEqual(res.records[0][3], 4) 84 | 85 
| def test_json_list_file(self): 86 | fdb = FileDb("example/json_list_test.json") 87 | res = fdb.exec_query("select * from json_list_test") 88 | 89 | self.assertEqual(len(res.records), 3) 90 | 91 | # TODO add tests for joining JSON, CSV and parquet files 92 | def test_join_json_and_csv(self): 93 | fdb = FileDb("example/data/") 94 | res = fdb.exec_query( 95 | """ 96 | select * 97 | from 98 | test t1 99 | inner join test1 t2 100 | on t1.col1 = t2.col1; 101 | """ 102 | ) 103 | 104 | self.assertEqual(len(res.records), 2) 105 | 106 | def test_ndjson_file(self): 107 | fdb = FileDb("example/ndjson_test.ndjson") 108 | res = fdb.exec_query( 109 | "select id, value, nested.subid, nested.subval from ndjson_test" 110 | ) 111 | 112 | self.assertEqual(len(res.records), 4) 113 | 114 | def test_dict_records_length_csv(self): 115 | fdb = FileDb("example/test.csv") 116 | res = fdb.exec_query("select * from test") 117 | 118 | self.assertEqual(len(res.dict_records), 3) 119 | 120 | def test_dict_records_keys_csv(self): 121 | fdb = FileDb("example/test.csv") 122 | res = fdb.exec_query("select * from test") 123 | 124 | for rec in res.dict_records: 125 | self.assertListEqual(list(rec.keys()), ["col1", "col2", "col3"]) 126 | 127 | def test_dict_records_length_json(self): 128 | fdb = FileDb("example/ndjson_test.ndjson") 129 | res = fdb.exec_query("select * from ndjson_test") 130 | 131 | self.assertEqual(len(res.dict_records), 4) 132 | 133 | def test_dict_records_keys_json(self): 134 | fdb = FileDb("example/ndjson_test.ndjson") 135 | res = fdb.exec_query("select * from ndjson_test") 136 | 137 | for rec in res.dict_records: 138 | self.assertListEqual(list(rec.keys()), ["id", "value", "nested"]) 139 | 140 | def test_dict_records_nested_keys_json(self): 141 | fdb = FileDb("example/ndjson_test.ndjson") 142 | res = fdb.exec_query("select * from ndjson_test") 143 | 144 | for rec in res.dict_records: 145 | self.assertListEqual(list(rec["nested"].keys()), ["subid", "subval"]) 146 | 147 | def 
test_valid_unquoted_identifier(self): 148 | fdb = FileDb("example/test.csv") 149 | should_quote = fdb._should_quote_table_name("test_table") 150 | 151 | self.assertFalse(should_quote) 152 | 153 | def test_filename_with_leading_underscore(self): 154 | fdb = FileDb("example/test.csv") 155 | should_quote = fdb._should_quote_table_name("_test_table") 156 | 157 | self.assertFalse(should_quote) 158 | 159 | def test_filename_with_numbers(self): 160 | fdb = FileDb("example/test.csv") 161 | should_quote = fdb._should_quote_table_name("test_table_1") 162 | 163 | self.assertFalse(should_quote) 164 | 165 | def test_filename_with_hyphen(self): 166 | fdb = FileDb("example/test.csv") 167 | should_quote = fdb._should_quote_table_name("test-table") 168 | 169 | self.assertTrue(should_quote) 170 | 171 | def test_filename_with_leading_number(self): 172 | fdb = FileDb("example/test.csv") 173 | should_quote = fdb._should_quote_table_name("1test") 174 | 175 | self.assertTrue(should_quote) 176 | 177 | def test_filename_with_spaces(self): 178 | fdb = FileDb("example/test.csv") 179 | should_quote = fdb._should_quote_table_name("test table") 180 | 181 | self.assertTrue(should_quote) 182 | 183 | def test_filename_with_leading_special_char(self): 184 | fdb = FileDb("example/test.csv") 185 | should_quote = fdb._should_quote_table_name("$test") 186 | 187 | self.assertTrue(should_quote) 188 | 189 | def test_filename_with_leading_space(self): 190 | fdb = FileDb("example/test.csv") 191 | should_quote = fdb._should_quote_table_name(" test") 192 | 193 | self.assertTrue(should_quote) 194 | 195 | 196 | class TestFileQueryCli(unittest.TestCase): 197 | ##################################################### 198 | # tests for invalid arguments 199 | ##################################################### 200 | 201 | def test_no_filename_or_filesdir(self): 202 | args = FileQueryArgs( 203 | filename=None, 204 | filesdir=None, 205 | query="select * from test", 206 | query_file=None, 207 | out_file=None, 208 | 
out_file_format=None, 209 | delimiter=None, 210 | editor=False, 211 | ) 212 | 213 | err = validate_args(args) 214 | 215 | self.assertIsNotNone(err) 216 | 217 | def test_provide_filename_and_filesdir(self): 218 | args = FileQueryArgs( 219 | filename="example/test.csv", 220 | filesdir="example/data", 221 | query="select * from test", 222 | query_file=None, 223 | out_file=None, 224 | out_file_format=None, 225 | delimiter=None, 226 | editor=False, 227 | ) 228 | 229 | err = validate_args(args) 230 | 231 | self.assertIsNotNone(err) 232 | 233 | def test_no_query_or_query_file(self): 234 | args = FileQueryArgs( 235 | filename="example/test.csv", 236 | filesdir=None, 237 | query=None, 238 | query_file=None, 239 | out_file=None, 240 | out_file_format=None, 241 | delimiter=None, 242 | editor=False, 243 | ) 244 | 245 | err = validate_args(args) 246 | 247 | self.assertIsNotNone(err) 248 | 249 | def test_provide_query_and_query_file(self): 250 | args = FileQueryArgs( 251 | filename="example/test.csv", 252 | filesdir=None, 253 | query="select * from test", 254 | query_file="example/queries/join.sql", 255 | out_file=None, 256 | out_file_format=None, 257 | delimiter=None, 258 | editor=False, 259 | ) 260 | 261 | err = validate_args(args) 262 | 263 | self.assertIsNotNone(err) 264 | 265 | ##################################################### 266 | # tests for handling arguments 267 | ##################################################### 268 | 269 | def handle_args_single_out_file(self, args: FileQueryArgs, out_file: str): 270 | # call handle_args for creating an output file, check that the file exists, then delete the file 271 | handle_args(args) 272 | self.assertTrue(os.path.exists(out_file)) 273 | 274 | # cleanup 275 | os.remove(out_file) 276 | 277 | def handle_args_multiple_out_files(self, args: FileQueryArgs): 278 | # call handle_args for creating multiple output files, check that each file exists, then delete the files 279 | handle_args(args) 280 | 281 | for file in 
args.out_file: 282 | self.assertTrue(os.path.exists(file)) 283 | 284 | # cleanup 285 | os.remove(file) 286 | 287 | def test_single_output_file_default(self): 288 | out_file = "test_result.csv" 289 | 290 | args = FileQueryArgs( 291 | filename="example/test.csv", 292 | filesdir=None, 293 | query="select * from test", 294 | query_file=None, 295 | out_file=[out_file], 296 | out_file_format=None, 297 | delimiter=None, 298 | editor=False, 299 | ) 300 | 301 | self.handle_args_single_out_file(args, out_file) 302 | 303 | def test_single_output_file_csv(self): 304 | out_file = "test_result.csv" 305 | 306 | args = FileQueryArgs( 307 | filename="example/test.csv", 308 | filesdir=None, 309 | query="select * from test", 310 | query_file=None, 311 | out_file=[out_file], 312 | out_file_format="csv", 313 | delimiter=None, 314 | editor=False, 315 | ) 316 | 317 | self.handle_args_single_out_file(args, out_file) 318 | 319 | def test_single_output_file_parquet(self): 320 | out_file = "test_result.parquet" 321 | 322 | args = FileQueryArgs( 323 | filename="example/test.csv", 324 | filesdir=None, 325 | query="select * from test", 326 | query_file=None, 327 | out_file=[out_file], 328 | out_file_format="parquet", 329 | delimiter=None, 330 | editor=False, 331 | ) 332 | 333 | self.handle_args_single_out_file(args, out_file) 334 | 335 | def test_multiple_output_files_default(self): 336 | out_files = ["test_result1.csv", "test_result2.csv", "test_result3.csv"] 337 | 338 | args = FileQueryArgs( 339 | filename="example/test.csv", 340 | filesdir=None, 341 | query="select * from test; select sum(col3) from test; select col1 from test where col1 = 1;", 342 | query_file=None, 343 | out_file=out_files, 344 | out_file_format=None, 345 | delimiter=None, 346 | editor=False, 347 | ) 348 | 349 | self.handle_args_multiple_out_files(args) 350 | 351 | def test_multiple_output_files_csv(self): 352 | out_files = ["test_result1.csv", "test_result2.csv", "test_result3.csv"] 353 | 354 | args = FileQueryArgs( 355 | 
filename="example/test.csv", 356 | filesdir=None, 357 | query="select * from test; select sum(col3) from test; select col1 from test where col1 = 1;", 358 | query_file=None, 359 | out_file=out_files, 360 | out_file_format="csv", 361 | delimiter=None, 362 | editor=False, 363 | ) 364 | 365 | self.handle_args_multiple_out_files(args) 366 | 367 | def test_multiple_output_files_parquet(self): 368 | out_files = [ 369 | "test_result1.parquet", 370 | "test_result2.parquet", 371 | "test_result3.parquet", 372 | ] 373 | 374 | args = FileQueryArgs( 375 | filename="example/test.csv", 376 | filesdir=None, 377 | query="select * from test; select sum(col3) from test; select col1 from test where col1 = 1;", 378 | query_file=None, 379 | out_file=out_files, 380 | out_file_format="parquet", 381 | delimiter=None, 382 | editor=False, 383 | ) 384 | 385 | self.handle_args_multiple_out_files(args) 386 | 387 | 388 | if __name__ == "__main__": 389 | unittest.main() 390 | 391 | # for one-off testing 392 | # fdb = FileDb('example/test.csv') 393 | # fdb.export_database('test') 394 | # res = fdb.exec_query('select * from test') 395 | 396 | # print(res) 397 | --------------------------------------------------------------------------------