├── .coveragerc ├── .github └── workflows │ ├── ci.yaml │ └── release.yaml ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── databend_py ├── VERSION ├── __init__.py ├── client.py ├── connection.py ├── context.py ├── datetypes.py ├── defines.py ├── errors.py ├── log.py ├── result.py ├── retry.py ├── sdk_info.py ├── uploader.py └── util │ ├── __init__.py │ ├── escape.py │ └── helper.py ├── docker-compose.yaml ├── docs └── connection.md ├── examples ├── batch_insert.py ├── iter_query.py ├── ordinary_query.py ├── replace_into.py ├── session_setting.py └── upload_to_stage.py ├── pyproject.toml ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── test_client.py └── test_simple.py └── uv.lock /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | plugins = Cython.Coverage 3 | source = databend_py -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v2 16 | 17 | - name: Install uv 18 | uses: astral-sh/setup-uv@v4 19 | 20 | - name: Set up Python 21 | run: uv python install 22 | 23 | - name: Install the project 24 | run: uv sync --all-extras --dev 25 | 26 | - name: Start databend-server 27 | run: make up 28 | 29 | - name: Test 30 | env: 31 | TEST_DATABEND_DSN: "http://root:@localhost:8000/default" 32 | run: | 33 | make lint 34 | make ci 35 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release on Version Change 2 | 3 | on: 4 | pull_request: 5 | branches: [ main ] 6 | paths: 7 | - "databend_py/VERSION" 8 | 9 | jobs: 10 | release: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout Repository 15 | uses: actions/checkout@v3 16 | 17 | - name: Install uv 18 | uses: astral-sh/setup-uv@v4 19 | 20 | - name: Set up Python 21 | run: uv python install 22 | 23 | - name: Install the project 24 | run: uv sync --all-extras --dev 25 | 26 | - name: Release Package and Tag 27 | env: 28 | TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} 29 | TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} 30 | run: | 31 | export VERSION=$(cat databend_py/VERSION) 32 | git config user.name "hantmac" 33 | git config user.email "hantmac@outlook.com" 34 | git tag -a "v$VERSION" -m "Release Version $VERSION" 35 | git push origin "v$VERSION" 36 | uv publish 37 | echo "show user name:" 38 | echo ${{ secrets.TWINE_USERNAME }} 39 | twine upload -u ${{ secrets.TWINE_USERNAME }} -p ${{ secrets.TWINE_PASSWORD }} dist/* 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | upload.csv 2 | .envrc 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | env/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python 
script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *,cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # IPython Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # dotenv 81 | .env 82 | 83 | # virtualenv 84 | venv/ 85 | ENV/ 86 | 87 | # Spyder project settings 88 | .spyderproject 89 | 90 | # Rope project settings 91 | .ropeproject 92 | 93 | # PyCharm project settings 94 | .idea/ 95 | 96 | /dist 97 | /CHANGELOG.md 98 | /script/build 99 | 100 | # VS Code 101 | .vscode 102 | 103 | # IntelliJ 104 | .idea 105 | 106 | # macOS 107 | .DS_Store 108 | 109 | # vim 110 | *.swp 111 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include databend_py/* -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | prepare: 2 | mkdir -p data/databend 3 | 4 | up: prepare 5 | docker compose -f docker-compose.yaml up --quiet-pull -d databend --wait 6 | curl -u root: -XPOST "http://localhost:8000/v1/query" -H 'Content-Type: application/json' -d '{"sql": "select version()", "pagination": { "wait_time_secs": 10}}' 7 | 8 | start: up 9 | 10 | test: 11 | uv run pytest . 12 | 13 | ci: 14 | uv run pytest . 15 | 16 | lint: 17 | uv run ruff check 18 | 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Notice 2 | We strongly recommend using the [databend-driver](https://pypi.org/project/databend-driver) as it provides more comprehensive features. 3 | 4 | 5 | # databend-py 6 | 7 | Databend Cloud Python Driver with native HTTP interface support 8 | 9 | [![image](https://img.shields.io/pypi/v/databend-py.svg)](https://pypi.org/project/databend-py) 10 | 11 | [![image](https://coveralls.io/repos/github/databendcloud/databend-py/badge.svg?branch=master)](https://coveralls.io/github/databendcloud/databend-py?branch=master) 12 | 13 | [![image](https://img.shields.io/pypi/l/databend-py.svg)](https://pypi.org/project/databend-py) 14 | 15 | [![image](https://img.shields.io/pypi/pyversions/databend-py.svg)](https://pypi.org/project/databend-py) 16 | 17 | # Installation 18 | 19 | pip install databend-py 20 | 21 | # Usage 22 | 23 | Use the following code to check the connection: 24 | 25 | > ``` python 26 | > >>> from databend_py import Client 27 | > >>> client = Client( 28 | > host='tenant--warehouse.ch.datafusecloud.com', 29 | > database="default", 30 | > user="user", 31 | > port="443", 32 | > secure=True, 33 | > password="password",settings={"copy_purge":True,"force":True}) 34 | > >>> print(client.execute("SELECT 1")) 35 | > ``` 36 | 37 | The [host]{.title-ref}, [user]{.title-ref}, and [password]{.title-ref} information 38 | can be found on the Databend Cloud warehouse connection page, as follows: 39 | 40 | Pure Client example: 41 | 42 | > ``` python 43 | > >>> from databend_py import Client 44 | > >>> 45 | > >>> client = Client.from_url('http://root@localhost:8000/db?secure=False&copy_purge=True') 46 | > >>> 47 | > >>> client.execute('SHOW TABLES') 48 | > [[], [('test',)]] 49 | > >>> client.execute("show tables",with_column_types=True) 50 | > [[('Tables_in_default', 'String')], [('test',)]] # [[(column_name, column_type)], [(data,)]] 51 | > >>> client.execute('DROP TABLE IF EXISTS test') 52 | > [] 53 | > >>> client.execute('CREATE TABLE test (x Int32)') 54 | > [] 55 | > >>> client.execute( 56 | > ... 'INSERT INTO test (x) VALUES', [(1,)] 57 | > ... ) 58 | > 1 59 | > >>> client.execute('INSERT INTO test (x) VALUES', [(200,)]) 60 | > 1 61 | > ``` 62 | 63 | More usage examples can be found [here](./examples). 64 | 65 | # Features 66 | 67 | - Basic SQL. 68 | - TLS support. 69 | - Query settings.
70 | - Types support: 71 | - Float32/64 72 | - \[U\]Int8/16/32/64/128/256 73 | - Date/Date32/DateTime(\'timezone\')/DateTime64(\'timezone\') 74 | - String 75 | - Array(T) 76 | - Nullable(T) 77 | - Bool 78 | 79 | # Compatibility 80 | 81 | - If databend version \>= v0.9.0 or later, you need to use databend-py 82 | version \>= v0.3.0. 83 | 84 | # License 85 | 86 | Databend Python Driver is distributed under the [Apache 87 | license]{.title-ref}. 88 | -------------------------------------------------------------------------------- /databend_py/VERSION: -------------------------------------------------------------------------------- 1 | 0.6.3 2 | -------------------------------------------------------------------------------- /databend_py/__init__.py: -------------------------------------------------------------------------------- 1 | from .client import Client 2 | from .connection import Connection 3 | from .datetypes import DatabendDataType 4 | 5 | from databend_driver import ( 6 | AsyncDatabendClient, 7 | AsyncDatabendConnection, 8 | BlockingDatabendClient, 9 | BlockingDatabendConnection, 10 | Row, 11 | RowIterator, 12 | Field, 13 | Schema, 14 | ServerStats, 15 | ConnectionInfo, 16 | ) 17 | 18 | __all__ = [ 19 | "Client", 20 | "Connection", 21 | "DatabendDataType", 22 | "AsyncDatabendClient", 23 | "AsyncDatabendConnection", 24 | "BlockingDatabendClient", 25 | "BlockingDatabendConnection", 26 | "Row", 27 | "RowIterator", 28 | "Field", 29 | "Schema", 30 | "ServerStats", 31 | "ConnectionInfo", 32 | ] 33 | -------------------------------------------------------------------------------- /databend_py/client.py: -------------------------------------------------------------------------------- 1 | import json 2 | from urllib.parse import urlparse, parse_qs, unquote 3 | 4 | from .connection import Connection 5 | from .uploader import DataUploader 6 | from .result import QueryResult 7 | from .util.escape import escape_params 8 | from .util.helper import asbool, Helper 9 | 10 | 11 | class Client(object): 12 | """ 13 | Client for communication with the databend http server. 14 | Single connection is established per each connected instance of the client. 
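The `settings` and `result_config` keyword arguments are consumed by the client itself; all remaining arguments are forwarded to the underlying Connection (host, port, user, password, database, secure, and so on). A client can also be built from a DSN via `Client.from_url()`.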
15 | """ 16 | 17 | def __init__(self, *args, **kwargs): 18 | self.settings = (kwargs.pop("settings", None) or {}).copy() 19 | self.result_config = (kwargs.pop("result_config", None) or {}).copy() 20 | self.connection = Connection(*args, **kwargs) 21 | self.query_result_cls = QueryResult 22 | self.helper = Helper 23 | self._debug = asbool(self.settings.get("debug", False)) 24 | self._uploader = DataUploader( 25 | self, 26 | self.connection, 27 | self.settings, 28 | debug=self._debug, 29 | compress=self.settings.get("compress", False), 30 | ) 31 | 32 | def __enter__(self): 33 | return self 34 | 35 | def disconnect(self): 36 | self.disconnect_connection() 37 | 38 | def disconnect_connection(self): 39 | self.connection.disconnect() 40 | 41 | def _data_generator(self, raw_data): 42 | while raw_data["next_uri"] is not None: 43 | try: 44 | raw_data = self._receive_data(raw_data["next_uri"]) 45 | yield raw_data 46 | except (Exception, KeyboardInterrupt): 47 | self.disconnect() 48 | raise 49 | 50 | def _receive_data(self, next_uri: str): 51 | resp = self.connection.next_page(next_uri) 52 | raw_data = json.loads(resp.content) 53 | helper = self.helper() 54 | helper.response = raw_data 55 | helper.check_error() 56 | return raw_data 57 | 58 | def _receive_result(self, query, query_id=None, with_column_types=False): 59 | raw_data = self.connection.query(query) 60 | helper = self.helper() 61 | helper.response = raw_data 62 | helper.check_error() 63 | gen = self._data_generator(raw_data) 64 | result = self.query_result_cls( 65 | gen, raw_data, with_column_types=with_column_types, **self.result_config 66 | ) 67 | return result.get_result() 68 | 69 | def _iter_receive_result(self, query, query_id=None, with_column_types=False): 70 | raw_data = self.connection.query(query) 71 | helper = self.helper() 72 | helper.response = raw_data 73 | helper.check_error() 74 | gen = self._data_generator(raw_data) 75 | result = self.query_result_cls( 76 | gen, raw_data, with_column_types=with_column_types, **self.result_config 77 | ) 78 | _, rows = result.get_result() 79 | for row in rows: 80 | yield row 81 | 82 | def execute( 83 | self, query, params=None, with_column_types=False, query_id=None, settings=None 84 | ): 85 | """ 86 | Executes query. 87 | :param query: query that will be send to server. 88 | :param params: substitution parameters for SELECT queries and data for 89 | INSERT queries. Data for INSERT can be `list`, `tuple` 90 | or :data:`~types.GeneratorType`. 91 | Defaults to ``None`` (no parameters or data). 92 | :param with_column_types: if specified column names and types will be 93 | returned alongside with result. 94 | Defaults to ``False``. 95 | :param query_id: the query identifier. If no query id specified 96 | Databend server will generate it. 97 | :param settings: dictionary of query settings. 98 | Defaults to ``None`` (no additional settings). 99 | 100 | :return: * number of inserted rows for INSERT queries with data. 101 | Returning rows count from INSERT FROM SELECT is not 102 | supported. 103 | * if `with_column_types=False`: `list` of `tuples` with 104 | rows/columns. 105 | * if `with_column_types=True`: `tuple` of 2 elements: 106 | * The first element is `list` of `tuples` with 107 | rows/columns. 108 | * The second element information is about columns: names 109 | and types. 110 | """ 111 | # INSERT queries can use list/tuple/generator of list/tuples/dicts. 112 | # For SELECT parameters can be passed in only in dict right now. 
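# Note: only a list or tuple passed as `params` is treated as INSERT data below;
# a generator would not match the isinstance() check and falls through to the
# ordinary-query path.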
113 | is_insert = isinstance(params, (list, tuple)) 114 | 115 | if is_insert: 116 | # remove the `\n` '\s' `\t` in the SQL 117 | query = " ".join([s.strip() for s in query.splitlines()]).strip() 118 | rv = self._process_insert_query(query, params) 119 | return [], rv 120 | 121 | column_types, rv = self._process_ordinary_query( 122 | query, params=params, with_column_types=with_column_types, query_id=query_id 123 | ) 124 | return column_types, rv 125 | 126 | # params = [(1,),(2,)] or params = [(1,2),(2,3)] 127 | def _process_insert_query(self, query, params): 128 | insert_rows = 0 129 | if "values" in query: 130 | query = query.split("values")[0] + "values" 131 | elif "VALUES" in query: 132 | query = query.split("VALUES")[0] + "VALUES" 133 | if len(query.split(" ")) < 3: 134 | raise Exception("Not standard insert/replace statement") 135 | batch_size = query.count(",") + 1 136 | if params is not None and len(params) > 0: 137 | if isinstance(params[0], tuple): 138 | tuple_ls = params 139 | elif isinstance(params[0], dict): 140 | # if params type is list[dictionary], then it's a insert query 141 | tuple_ls = [tuple(p.values()) for p in params] 142 | else: 143 | tuple_ls = [ 144 | tuple(params[i: i + batch_size]) 145 | for i in range(0, len(params), batch_size) 146 | ] 147 | insert_rows = len(tuple_ls) 148 | self._uploader.upload_to_table_by_attachment(query, tuple_ls) 149 | return insert_rows 150 | 151 | def _process_ordinary_query( 152 | self, query, params=None, with_column_types=False, query_id=None 153 | ): 154 | if params is not None: 155 | query = self._substitute_params(query, params, self.connection.context) 156 | return self._receive_result( 157 | query, 158 | query_id=query_id, 159 | with_column_types=with_column_types, 160 | ) 161 | 162 | def execute_iter( 163 | self, query, params=None, with_column_types=False, query_id=None, settings=None 164 | ): 165 | if params is not None: 166 | query = self._substitute_params(query, params, self.connection.context) 167 | return self._iter_receive_result( 168 | query, query_id=query_id, with_column_types=with_column_types 169 | ) 170 | 171 | def _iter_process_ordinary_query( 172 | self, query, with_column_types=False, query_id=None 173 | ): 174 | return self._iter_receive_result( 175 | query, query_id=query_id, with_column_types=with_column_types 176 | ) 177 | 178 | def _substitute_params(self, query, params, context): 179 | if not isinstance(params, dict): 180 | raise ValueError("Parameters are expected in dict form") 181 | 182 | escaped = escape_params(params, context) 183 | return query % escaped 184 | 185 | @classmethod 186 | def from_url(cls, url): 187 | """ 188 | Return a client configured from the given URL. 189 | 190 | For example:: 191 | 192 | https://[user:password]@localhost:8000/default?secure=True 193 | http://[user:password]@localhost:8000/default 194 | databend://[user:password]@localhost:8000/default 195 | 196 | Any additional querystring arguments will be passed along to 197 | the Connection class's initializer. 
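In this implementation `secure`, `copy_purge`, `debug`, `compress`, `persist_cookies`, `null_to_none`, `tenant`, `warehouse`, `client_name` and the timeout options are handled specially; any other query parameter is collected into the `settings` dictionary.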
198 | """ 199 | parsed_url = urlparse(url) 200 | 201 | settings = {} 202 | result_config = {} 203 | kwargs = {} 204 | for name, value in parse_qs(parsed_url.query).items(): 205 | if not value or not len(value): 206 | continue 207 | 208 | timeouts = { 209 | "connect_timeout", 210 | "read_timeout", 211 | "send_receive_timeout", 212 | "sync_request_timeout", 213 | } 214 | 215 | value = value[0] 216 | 217 | if name == "client_name": 218 | kwargs[name] = value 219 | elif name == "tenant": 220 | kwargs[name] = value 221 | elif name == "warehouse": 222 | kwargs[name] = value 223 | elif name == "secure": 224 | kwargs[name] = asbool(value) 225 | elif name == "copy_purge": 226 | kwargs[name] = asbool(value) 227 | settings[name] = asbool(value) 228 | elif name == "debug": 229 | settings[name] = asbool(value) 230 | elif name == "compress": 231 | settings[name] = asbool(value) 232 | elif name in timeouts: 233 | kwargs[name] = float(value) 234 | elif name == "persist_cookies": 235 | kwargs[name] = asbool(value) 236 | elif name == "null_to_none": 237 | result_config[name] = asbool(value) 238 | else: 239 | settings[name] = value # settings={'copy_purge':False} 240 | secure = kwargs.get("secure", False) 241 | kwargs["secure"] = secure 242 | 243 | host = parsed_url.hostname 244 | 245 | if parsed_url.port is not None: 246 | kwargs["port"] = parsed_url.port 247 | 248 | path = parsed_url.path.replace("/", "", 1) 249 | if path: 250 | kwargs["database"] = path 251 | 252 | if parsed_url.username is not None: 253 | kwargs["user"] = unquote(parsed_url.username) 254 | 255 | if parsed_url.password is not None: 256 | kwargs["password"] = unquote(parsed_url.password) 257 | 258 | if settings: 259 | kwargs["settings"] = settings 260 | if result_config: 261 | kwargs["result_config"] = result_config 262 | 263 | return cls(host, **kwargs) 264 | 265 | def insert(self, database_name, table_name, data): 266 | """ 267 | insert the data into database.table according to the file 268 | database_name: the target database 269 | table_name: the table which write into 270 | data: the data which write into, it's a list of tuple 271 | """ 272 | # TODO: escape the database & table name 273 | self._uploader.upload_to_table_by_copy( 274 | "%s.%s" % (database_name, table_name), data 275 | ) 276 | 277 | def replace(self, database_name, table_name, conflict_keys, data): 278 | """ 279 | replace the data into database.table according to the file 280 | database_name: the target database 281 | table_name: the table which write into 282 | conflict_keys: the key that use to replace into 283 | data: the data which write into, it's a list of tuple 284 | """ 285 | self._uploader.replace_into_table( 286 | "%s.%s" % (database_name, table_name), conflict_keys, data 287 | ) 288 | 289 | def upload_to_stage(self, stage_dir, file_name, data): 290 | """ 291 | upload the file to user stage 292 | :param stage_dir: target stage directory 293 | :param file_name: the target file name which placed into the stage_dir 294 | :param data: the data value or file handler 295 | :return: 296 | """ 297 | return self._uploader.upload_to_stage(stage_dir, file_name, data) 298 | 299 | def begin(self): 300 | try: 301 | self.execute("BEGIN") 302 | except Exception as e: 303 | raise e 304 | 305 | def commit(self): 306 | try: 307 | self.execute("COMMIT") 308 | except Exception as e: 309 | raise e 310 | 311 | def rollback(self): 312 | try: 313 | self.execute("ROLLBACK") 314 | except Exception as e: 315 | raise e 316 | 
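For reference, here is a minimal usage sketch of the `Client` defined above. The DSN, table name and values are illustrative assumptions rather than part of the driver; any reachable Databend HTTP endpoint behaves the same way.

```python
from databend_py import Client

# Hypothetical local server; substitute your own DSN.
client = Client.from_url("http://root:@localhost:8000/default")

client.execute("CREATE TABLE IF NOT EXISTS sample (x Int32, note String)")

# Passing a list of tuples marks the call as an INSERT with data:
# execute() then returns ([], inserted_row_count).
_, inserted = client.execute(
    "INSERT INTO sample (x, note) VALUES", [(1, "a"), (2, "b")]
)

# Ordinary queries return (column_types, rows); column_types is empty
# unless with_column_types=True is requested.
columns, rows = client.execute("SELECT * FROM sample", with_column_types=True)

# execute_iter() streams rows one at a time instead of materializing them all.
for row in client.execute_iter("SELECT x FROM sample"):
    print(row)

client.disconnect()
```

Under the hood the insert path serializes the rows to CSV, uploads them to a stage through a presigned URL, and attaches the staged file to the statement, which is why `execute()` reports a row count rather than a result set for inserts.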
-------------------------------------------------------------------------------- /databend_py/connection.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import base64 4 | import time 5 | import uuid 6 | 7 | from http.cookiejar import Cookie 8 | from requests.auth import HTTPBasicAuth 9 | from requests.cookies import RequestsCookieJar 10 | 11 | import environs 12 | import requests 13 | from . import log 14 | from . import defines 15 | from .context import Context 16 | from databend_py.errors import ( 17 | WarehouseTimeoutException, 18 | UnexpectedException, 19 | ServerException, 20 | ) 21 | from databend_py.retry import retry 22 | from databend_py.sdk_info import sdk_info 23 | 24 | XDatabendQueryIDHeader = "X-DATABEND-QUERY-ID" 25 | XDatabendTenantHeader = "X-DATABEND-TENANT" 26 | XDatabendWarehouseHeader = "X-DATABEND-WAREHOUSE" 27 | QueryID = "id" 28 | 29 | 30 | class ServerInfo(object): 31 | def __init__( 32 | self, 33 | name, 34 | version_major, 35 | version_minor, 36 | version_patch, 37 | revision, 38 | timezone, 39 | display_name, 40 | ): 41 | self.name = name 42 | self.version_major = version_major 43 | self.version_minor = version_minor 44 | self.version_patch = version_patch 45 | self.revision = revision 46 | self.timezone = timezone 47 | self.display_name = display_name 48 | 49 | super(ServerInfo, self).__init__() 50 | 51 | def version_tuple(self): 52 | return self.version_major, self.version_minor, self.version_patch 53 | 54 | def __repr__(self): 55 | version = "%s.%s.%s" % ( 56 | self.version_major, 57 | self.version_minor, 58 | self.version_patch, 59 | ) 60 | items = [ 61 | ("name", self.name), 62 | ("version", version), 63 | ("revision", self.revision), 64 | ("timezone", self.timezone), 65 | ("display_name", self.display_name), 66 | ] 67 | 68 | params = ", ".join("{}={}".format(key, value) for key, value in items) 69 | return "" % (params) 70 | 71 | 72 | def get_error(response): 73 | if response["error"] is None: 74 | return None 75 | 76 | # Wrap errno into msg, for result check 77 | return ServerException(response["error"]["message"], response["error"]["code"]) 78 | 79 | 80 | class GlobalCookieJar(RequestsCookieJar): 81 | 82 | def __init__(self): 83 | super().__init__() 84 | 85 | def set_cookie(self, cookie: Cookie, *args, **kwargs): 86 | cookie.domain = "" 87 | cookie.path = "/" 88 | super().set_cookie(cookie, *args, **kwargs) 89 | 90 | 91 | class Connection(object): 92 | # Databend http handler doc: https://databend.rs/doc/reference/api/rest 93 | 94 | # Call connect(**driver) 95 | # driver is a dict contains: 96 | # { 97 | # 'user': 'root', 98 | # 'host': '127.0.0.1', 99 | # 'port': 3307, 100 | # 'database': 'default' 101 | # } 102 | def __init__( 103 | self, 104 | host, 105 | tenant=None, 106 | warehouse=None, 107 | port=None, 108 | user=defines.DEFAULT_USER, 109 | password=defines.DEFAULT_PASSWORD, 110 | connect_timeout=defines.DEFAULT_CONNECT_TIMEOUT, 111 | read_timeout=defines.DEFAULT_READ_TIMEOUT, 112 | database=defines.DEFAULT_DATABASE, 113 | secure=False, 114 | copy_purge=False, 115 | session_settings=None, 116 | persist_cookies=False, 117 | ): 118 | self.host = host 119 | self.port = port 120 | self.tenant = tenant 121 | self.warehouse = warehouse 122 | self.user = user 123 | self.password = password 124 | self.database = database 125 | self.connect_timeout = connect_timeout 126 | self.read_timeout = read_timeout 127 | self.secure = secure 128 | self.copy_purge = copy_purge 129 | 
self.session_max_idle_time = defines.DEFAULT_SESSION_IDLE_TIME 130 | self.client_session = session_settings 131 | self.additional_headers = dict() 132 | self.query_option = None 133 | self.context = Context() 134 | self.requests_session = requests.Session() 135 | self.schema = "http" 136 | cookie_jar = GlobalCookieJar() 137 | cookie_jar.set("cookie_enabled", "true") 138 | self.requests_session.cookies = cookie_jar 139 | self.schema = 'http' 140 | if self.secure: 141 | self.schema = "https" 142 | e = environs.Env() 143 | if os.getenv("ADDITIONAL_HEADERS") is not None: 144 | print(os.getenv("ADDITIONAL_HEADERS")) 145 | self.additional_headers = e.dict("ADDITIONAL_HEADERS") 146 | self.persist_cookies = persist_cookies 147 | self.cookies = None 148 | 149 | def default_session(self): 150 | return {"database": self.database} 151 | 152 | def make_headers(self): 153 | headers = { 154 | "Content-Type": "application/json", 155 | "User-Agent": sdk_info(), 156 | "Accept": "application/json", 157 | "X-DATABEND-ROUTE": "warehouse", 158 | XDatabendTenantHeader: self.tenant, 159 | XDatabendWarehouseHeader: self.warehouse, 160 | } 161 | if "Authorization" not in self.additional_headers: 162 | return { 163 | **headers, 164 | **self.additional_headers, 165 | "Authorization": "Basic " 166 | + base64.b64encode( 167 | "{}:{}".format(self.user, self.password).encode(encoding="utf-8") 168 | ).decode(), 169 | } 170 | else: 171 | return {**headers, **self.additional_headers} 172 | 173 | def get_description(self): 174 | return "{}:{}".format(self.host, self.port) 175 | 176 | def disconnect(self): 177 | self.client_session = dict() 178 | 179 | @retry(times=10, exceptions=WarehouseTimeoutException) 180 | def do_query(self, url, query_sql): 181 | response = self.requests_session.post( 182 | url, 183 | data=json.dumps(query_sql), 184 | headers=self.make_headers(), 185 | auth=HTTPBasicAuth(self.user, self.password), 186 | timeout=(self.connect_timeout, self.read_timeout), 187 | verify=True, 188 | ) 189 | if response.status_code != 200: 190 | try: 191 | resp_dict = json.loads(response.content) 192 | if ( 193 | resp_dict 194 | and resp_dict.get("error") 195 | and "no endpoint" in resp_dict.get("error") 196 | ): 197 | raise WarehouseTimeoutException 198 | except ValueError: 199 | pass 200 | raise UnexpectedException( 201 | "Unexpected status code %d when post query, content: %s, headers: %s" 202 | % (response.status_code, response.content, response.headers) 203 | ) 204 | 205 | if response.content: 206 | try: 207 | resp_dict = json.loads(response.content) 208 | except ValueError: 209 | raise UnexpectedException( 210 | "failed to parse response: %s" % response.content 211 | ) 212 | if ( 213 | resp_dict 214 | and resp_dict.get("error") 215 | and "no endpoint" in resp_dict.get("error") 216 | ): 217 | raise WarehouseTimeoutException 218 | if resp_dict and resp_dict.get("error"): 219 | raise UnexpectedException("failed to query: %s" % response.content) 220 | if self.persist_cookies: 221 | self.cookies = response.cookies 222 | return resp_dict 223 | else: 224 | raise UnexpectedException("response content is empty: %s" % response) 225 | 226 | def query(self, statement): 227 | url = self.format_url() 228 | log.logger.debug(f"http sql: {statement}") 229 | query_sql = {"sql": statement, "string_fields": True} 230 | if self.client_session is not None and len(self.client_session) != 0: 231 | if "database" not in self.client_session: 232 | self.client_session = self.default_session() 233 | query_sql["session"] = self.client_session 234 | 
else: 235 | self.client_session = self.default_session() 236 | query_sql["session"] = self.client_session 237 | # if XDatabendQueryIDHeader in self.additional_headers: 238 | # del self.additional_headers[XDatabendQueryIDHeader] 239 | self.additional_headers.update({XDatabendQueryIDHeader: str(uuid.uuid4())}) 240 | log.logger.debug(f"http headers {self.make_headers()}") 241 | try: 242 | resp_dict = self.do_query(url, query_sql) 243 | new_session_state = resp_dict.get("session", self.default_session()) 244 | if new_session_state: 245 | self.client_session = new_session_state 246 | if self.additional_headers: 247 | self.additional_headers.update( 248 | {XDatabendQueryIDHeader: resp_dict.get(QueryID)} 249 | ) 250 | else: 251 | self.additional_headers = { 252 | XDatabendQueryIDHeader: resp_dict.get(QueryID) 253 | } 254 | return self.wait_until_has_schema(resp_dict) 255 | except Exception as err: 256 | log.logger.error( 257 | f"http error on {url}, SQL: {statement} error msg:{str(err)}" 258 | ) 259 | raise 260 | 261 | def format_url(self): 262 | if self.schema == "https" and self.port is None: 263 | self.port = 443 264 | elif self.schema == "http" and self.port is None: 265 | self.port = 80 266 | return f"{self.schema}://{self.host}:{self.port}/v1/query/" 267 | 268 | def reset_session(self): 269 | self.client_session = dict() 270 | 271 | def wait_until_has_schema(self, raw_data_dict): 272 | resp_schema = raw_data_dict.get("schema") 273 | while resp_schema is not None and len(resp_schema) == 0: 274 | if raw_data_dict["next_uri"] is None: 275 | break 276 | resp = self.next_page(raw_data_dict["next_uri"]) 277 | 278 | resp_dict = json.loads(resp.content) 279 | raw_data_dict = resp_dict 280 | resp_schema = raw_data_dict.get("schema") 281 | if resp_schema is not None and ( 282 | len(resp_schema) != 0 or len(raw_data_dict.get("data")) != 0 283 | ): 284 | break 285 | return raw_data_dict 286 | 287 | def next_page(self, next_uri): 288 | url = "{}://{}:{}{}".format(self.schema, self.host, self.port, next_uri) 289 | 290 | response = self.requests_session.get( 291 | url=url, headers=self.make_headers(), cookies=self.cookies 292 | ) 293 | if response.status_code != 200: 294 | raise UnexpectedException( 295 | "Unexpected status code %d when get %s, content: %s" 296 | % (response.status_code, url, response.content) 297 | ) 298 | return response 299 | 300 | # return a list of response util empty next_uri 301 | def query_with_session(self, statement): 302 | response_list = list() 303 | response = self.query(statement) 304 | log.logger.debug(f"response content: {response}") 305 | response_list.append(response) 306 | start_time = time.time() 307 | time_limit = 12 308 | session = response.get("session") 309 | if session: 310 | self.client_session = session 311 | while response["next_uri"] is not None: 312 | resp = self.next_page(response["next_uri"]) 313 | response = json.loads(resp.content) 314 | log.logger.debug(f"Sql in progress, fetch next_uri content: {response}") 315 | self.check_error(response) 316 | session = response.get("session") 317 | if session: 318 | self.client_session = session 319 | response_list.append(response) 320 | if time.time() - start_time > time_limit: 321 | log.logger.warning( 322 | f"after waited for {time_limit} secs, query still not finished (next uri not none)!" 
323 | ) 324 | return response_list 325 | 326 | def check_error(self, resp): 327 | error = get_error(resp) 328 | if error: 329 | raise error 330 | -------------------------------------------------------------------------------- /databend_py/context.py: -------------------------------------------------------------------------------- 1 | class Context(object): 2 | def __init__(self): 3 | self._server_info = None 4 | self._settings = None 5 | self._client_settings = None 6 | super(Context, self).__init__() 7 | 8 | @property 9 | def server_info(self): 10 | return self._server_info 11 | 12 | @server_info.setter 13 | def server_info(self, value): 14 | self._server_info = value 15 | 16 | @property 17 | def settings(self): 18 | return self._settings.copy() 19 | 20 | @settings.setter 21 | def settings(self, value): 22 | self._settings = value.copy() 23 | 24 | @property 25 | def client_settings(self): 26 | return self._client_settings.copy() 27 | 28 | @client_settings.setter 29 | def client_settings(self, value): 30 | self._client_settings = value.copy() 31 | 32 | def __repr__(self): 33 | return "" % ( 34 | self._server_info, 35 | self._client_settings, 36 | self._settings, 37 | ) 38 | -------------------------------------------------------------------------------- /databend_py/datetypes.py: -------------------------------------------------------------------------------- 1 | import ast 2 | 3 | INTType = "int" 4 | FLOATTYPE = "float" 5 | DOUBLETYPE = "double" 6 | BOOLEANTYPE = "bool" 7 | ARRAYTYPE = "array" 8 | MAPTYPE = "map" 9 | JSONTYPE = "json" 10 | NULLTYPE = "null" 11 | 12 | 13 | class DatabendDataType: 14 | def __init__(self): 15 | pass 16 | 17 | @staticmethod 18 | def type_convert_fn(type_str: str): 19 | if INTType in type_str.lower(): 20 | return int 21 | elif FLOATTYPE in type_str.lower(): 22 | return float 23 | elif DOUBLETYPE in type_str.lower(): 24 | return float 25 | elif BOOLEANTYPE in type_str.lower(): 26 | return str_to_bool 27 | elif MAPTYPE in type_str.lower(): 28 | return ast.literal_eval 29 | elif ARRAYTYPE in type_str.lower(): 30 | return ast.literal_eval 31 | elif JSONTYPE in type_str.lower(): 32 | return ast.literal_eval 33 | else: 34 | return str 35 | 36 | 37 | def str_to_bool(s): 38 | if isinstance(s, str) and s.isdigit(): 39 | return bool(int(s)) 40 | return bool(s) 41 | 42 | 43 | if __name__ == "__main__": 44 | d = DatabendDataType() 45 | print(d.type_convert_fn("Uint64")("0")) 46 | -------------------------------------------------------------------------------- /databend_py/defines.py: -------------------------------------------------------------------------------- 1 | DEFAULT_DATABASE = "default" 2 | DEFAULT_USER = "root" 3 | DEFAULT_PASSWORD = "" 4 | DEFAULT_SESSION_IDLE_TIME = 30 5 | DEFAULT_CONNECT_TIMEOUT = 180 6 | DEFAULT_READ_TIMEOUT = 180 7 | 8 | DBMS_NAME = "Databend" 9 | CLIENT_NAME = "databend-py" 10 | 11 | STRINGS_ENCODING = "utf-8" 12 | -------------------------------------------------------------------------------- /databend_py/errors.py: -------------------------------------------------------------------------------- 1 | class Error(Exception): 2 | code = None 3 | 4 | def __init__(self, message=None): 5 | self.message = message 6 | super(Error, self).__init__(message) 7 | 8 | def __str__(self): 9 | message = " " + self.message if self.message is not None else "" 10 | return "Code: {}.{}".format(self.code, message) 11 | 12 | 13 | class ServerException(Error): 14 | def __init__(self, message, code=None): 15 | self.message = message 16 | self.code = code 17 | 
super(ServerException, self).__init__(message) 18 | 19 | def __str__(self): 20 | return "Code: {} {}".format(self.code, self.message) 21 | 22 | 23 | class WarehouseTimeoutException(Error): 24 | def __init__(self, message, code=None): 25 | self.message = message 26 | self.code = code 27 | super(WarehouseTimeoutException, self).__init__(message) 28 | 29 | def __str__(self): 30 | return "Provision warehouse timeout: {}".format(self.message) 31 | 32 | 33 | class UnexpectedException(Error): 34 | def __init__(self, message): 35 | self.message = message 36 | super(UnexpectedException, self).__init__(message) 37 | 38 | def __str__(self): 39 | message = " " + self.message if self.message is not None else "" 40 | return "Unexpected: {}".format(message) 41 | -------------------------------------------------------------------------------- /databend_py/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | logger = logging.getLogger(__name__) 4 | 5 | 6 | log_priorities = ( 7 | "Unknown", 8 | "Fatal", 9 | "Critical", 10 | "Error", 11 | "Warning", 12 | "Notice", 13 | "Information", 14 | "Debug", 15 | "Trace", 16 | ) 17 | -------------------------------------------------------------------------------- /databend_py/result.py: -------------------------------------------------------------------------------- 1 | from .datetypes import DatabendDataType 2 | import re 3 | 4 | 5 | class QueryResult(object): 6 | """ 7 | Stores query result from multiple response data. 8 | """ 9 | 10 | def __init__( 11 | self, data_generator, first_data, with_column_types=False, null_to_none=False 12 | ): 13 | self.data_generator = data_generator 14 | self.with_column_types = with_column_types 15 | self.first_data = first_data 16 | self.column_data_dict_list = [] 17 | self.columns_with_types = [] 18 | self.column_type_dic = {} 19 | self.type_convert = DatabendDataType.type_convert_fn 20 | self.null_to_none = null_to_none 21 | 22 | super(QueryResult, self).__init__() 23 | 24 | def store_data(self, raw_data: dict): 25 | fields = raw_data.get("schema") 26 | column_name_ls = [] 27 | datas = raw_data.get("data") 28 | for field in fields: 29 | column_name_ls.append(field["name"]) 30 | 31 | for data in datas: 32 | self.column_data_dict_list.append(dict(zip(column_name_ls, data))) 33 | 34 | def store_columns(self, raw_data: dict): 35 | fields = raw_data.get("schema") 36 | for field in fields: 37 | inner_type = self.extract_type(field["type"]) 38 | column_type = (field["name"], inner_type) 39 | self.column_type_dic[field["name"]] = inner_type 40 | self.columns_with_types.append(column_type) 41 | 42 | def get_result(self): 43 | """ 44 | :return: stored query result. 
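When `with_column_types` is True this is a tuple of (columns_with_types, rows); otherwise the first element is an empty list. String "NULL" values are converted to None only when `null_to_none` is enabled.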
45 | """ 46 | data = [] 47 | self.store_data(self.first_data) 48 | self.store_columns(self.first_data) 49 | for d in self.data_generator: 50 | self.store_data(d) 51 | 52 | for read_data in self.column_data_dict_list: 53 | tmp_list = [] 54 | for c, d in read_data.items(): 55 | if d == "NULL": 56 | if self.null_to_none: 57 | tmp_list.append(None) 58 | else: 59 | tmp_list.append(d) 60 | else: 61 | tmp_list.append(self.type_convert(self.column_type_dic[c])(d)) 62 | data.append(tuple(tmp_list)) 63 | 64 | if self.with_column_types: 65 | return self.columns_with_types, data 66 | else: 67 | return [], data 68 | 69 | @staticmethod 70 | def extract_type(schema_type): 71 | if "nullable" in schema_type.lower(): 72 | return re.findall(r"[(](.*?)[)]", schema_type)[0] 73 | elif "(" in schema_type: 74 | return schema_type.split("(")[0] 75 | else: 76 | return schema_type 77 | -------------------------------------------------------------------------------- /databend_py/retry.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from databend_py.errors import WarehouseTimeoutException 4 | 5 | 6 | # retry in 550s for WarehouseTimeoutException 7 | def retry(times, exceptions): 8 | """ 9 | Retry Decorator 10 | Retries the wrapped function/method `times` times if the exceptions listed 11 | in ``exceptions`` are thrown 12 | :type times: Int 13 | :param exceptions: Lists of exceptions that trigger a retry attempt 14 | :type exceptions: Tuple of Exceptions 15 | """ 16 | 17 | def decorator(func): 18 | def newfn(*args, **kwargs): 19 | attempt = 1 20 | while attempt <= times: 21 | try: 22 | return func(*args, **kwargs) 23 | except exceptions: 24 | print( 25 | "Exception thrown when attempting to run %s, attempt " 26 | "%d of %d" % (func, attempt, times) 27 | ) 28 | time.sleep(attempt * 10) 29 | attempt += 1 30 | return func(*args, **kwargs) 31 | 32 | return newfn 33 | 34 | return decorator 35 | 36 | 37 | @retry(times=3, exceptions=WarehouseTimeoutException) 38 | def foo1(): 39 | print("Some code here ....") 40 | print("Oh no, we have exception") 41 | raise WarehouseTimeoutException("Some error") 42 | 43 | 44 | if __name__ == "__main__": 45 | foo1() 46 | -------------------------------------------------------------------------------- /databend_py/sdk_info.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | here = os.path.abspath(os.path.dirname(__file__)) 4 | 5 | 6 | def sdk_version(): 7 | version_py = os.path.join(here, "VERSION") 8 | with open(version_py, encoding="utf-8") as f: 9 | first_line = f.readline() 10 | return first_line.strip() 11 | 12 | 13 | def sdk_lan(): 14 | return "databend-py" 15 | 16 | 17 | def sdk_info(): 18 | return f"{sdk_lan()}/{sdk_version()}" 19 | -------------------------------------------------------------------------------- /databend_py/uploader.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import io 3 | import csv 4 | import uuid 5 | import json 6 | import time 7 | import gzip 8 | from . 
import log 9 | 10 | 11 | class DataUploader: 12 | def __init__( 13 | self, 14 | client, 15 | connection, 16 | settings, 17 | default_stage_dir="@~", 18 | debug=False, 19 | compress=False, 20 | ): 21 | # TODO: make it depends on Connection instead of Client 22 | self.client = client 23 | self.connection = connection 24 | self.settings = settings 25 | self.default_stage_dir = default_stage_dir 26 | self._compress = compress 27 | self._debug = debug 28 | 29 | def upload_to_table_by_copy(self, table_name, data): 30 | if len(data) == 0: 31 | return 32 | stage_path = self._gen_stage_path(self.default_stage_dir) 33 | presigned_url, headers = self._execute_presign(stage_path) 34 | self._upload_to_presigned_url(presigned_url, headers, data) 35 | self._execute_copy(table_name, stage_path, "CSV") 36 | 37 | def upload_to_table_by_attachment(self, sql_statement, data): 38 | if len(data) == 0: 39 | return 40 | stage_path = self._gen_stage_path(self.default_stage_dir) 41 | presigned_url, headers = self._execute_presign(stage_path) 42 | self._upload_to_presigned_url(presigned_url, headers, data) 43 | self._execute_with_attachment(sql_statement, stage_path, "CSV") 44 | 45 | def replace_into_table(self, table_name, conflict_keys, data): 46 | """ 47 | :param table_name: table name 48 | :param conflict_keys: if use replace, the conflict_keys can't be None 49 | :param data: list data to insert/replace 50 | :return: 51 | """ 52 | if len(data) == 0: 53 | return 54 | stage_path = self._gen_stage_path(self.default_stage_dir) 55 | presigned_url, headers = self._execute_presign(stage_path) 56 | self._upload_to_presigned_url(presigned_url, headers, data) 57 | sql_statement = ( 58 | f"REPLACE INTO {table_name} ON ({','.join(conflict_keys)}) VALUES" 59 | ) 60 | self._execute_with_attachment(sql_statement, stage_path, "CSV") 61 | 62 | def upload_to_stage(self, stage_dir, filename, data): 63 | stage_path = self._gen_stage_path(stage_dir, filename) 64 | presigned_url, headers = self._execute_presign(stage_path) 65 | self._upload_to_presigned_url(presigned_url, headers, data) 66 | return stage_path 67 | 68 | def _gen_stage_path(self, stage_dir, stage_filename=None): 69 | if stage_filename is None: 70 | suffix = ".csv.gz" if self._compress else ".csv" 71 | stage_filename = "%s%s" % (uuid.uuid4(), suffix) 72 | if stage_filename.startswith("/"): 73 | stage_filename = stage_filename[1:] 74 | # TODO: escape the stage_path if it contains special characters 75 | stage_path = "%s/%s" % (stage_dir, stage_filename) 76 | return stage_path 77 | 78 | def _execute_presign(self, stage_path): 79 | start_time = time.time() 80 | _, row = self.client.execute("presign upload %s" % stage_path) 81 | presigned_url = row[0][2] 82 | headers = json.loads(row[0][1]) 83 | if self._debug: 84 | print( 85 | "upload:_execute_presign %s: %s" 86 | % (stage_path, time.time() - start_time) 87 | ) 88 | return presigned_url, headers 89 | 90 | def _serialize_data(self, data, compress): 91 | # In Python3 csv.writer expects a file-like object opened in text mode. In Python2, csv.writer expects a file-like object opened in binary mode. 
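# Rows are rendered as CSV text first and gzip-compressed afterwards when requested;
# _gen_stage_path gives compressed uploads a .csv.gz suffix to match.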
92 | start_time = time.time() 93 | buf = io.StringIO() 94 | csvwriter = csv.writer(buf, delimiter=",", quoting=csv.QUOTE_MINIMAL) 95 | csvwriter.writerows(data) 96 | output = buf.getvalue().encode("utf-8") 97 | if compress: 98 | buf = io.BytesIO() 99 | with gzip.GzipFile(fileobj=buf, mode="wb") as gzwriter: 100 | gzwriter.write(output) 101 | output = buf.getvalue() 102 | if self._debug: 103 | print("upload:_serialize_data %s" % (time.time() - start_time)) 104 | return output 105 | 106 | def _upload_to_presigned_url(self, presigned_url, headers, data): 107 | # Check if data is bytes or File 108 | if isinstance(data, (bytes, io.IOBase)): 109 | data = data.read() # Read the data from the buffer 110 | buf = data 111 | buf_size = len(buf) 112 | data_len = 1 113 | elif isinstance(data, list): 114 | buf = self._serialize_data(data, self._compress) 115 | buf_size = len(buf) 116 | data_len = len(data) 117 | else: 118 | raise Exception("data is not bytes, File, or a list: %s" % type(data)) 119 | start_time = time.time() 120 | try: 121 | resp = requests.put(presigned_url, headers=headers, data=buf) 122 | resp.raise_for_status() 123 | finally: 124 | if self._debug: 125 | print( 126 | "upload:_upload_to_presigned_url len=%d bufsize=%d %s" 127 | % (data_len, buf_size, time.time() - start_time) 128 | ) 129 | 130 | def _execute_copy(self, table_name, stage_path, file_type): 131 | start_time = time.time() 132 | sql = self._make_copy_statement(table_name, stage_path, file_type) 133 | self.client.execute(sql) 134 | if self._debug: 135 | print( 136 | "upload:_execute_copy table=%s %s" 137 | % (table_name, time.time() - start_time) 138 | ) 139 | 140 | def _make_copy_statement(self, table_name, stage_path, file_type): 141 | # copy options docs: https://databend.rs/doc/sql-commands/dml/dml-copy-into-table#copyoptions 142 | copy_options = {} 143 | copy_options["PURGE"] = self.settings.get("copy_purge", False) 144 | copy_options["FORCE"] = self.settings.get("force", False) 145 | copy_options["SIZE_LIMIT"] = self.settings.get( 146 | "size_limit", 0 147 | ) # TODO: is this correct to set size_limit = 100? 
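# Databend's SIZE_LIMIT copy option is a row-count cap and 0 is normally treated as
# "no limit", so the default above loads the entire staged file.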
148 | copy_options["ON_ERROR"] = self.settings.get("on_error", "abort") 149 | return ( 150 | f"COPY INTO {table_name} FROM {stage_path} " 151 | f"FILE_FORMAT = (type = {file_type} RECORD_DELIMITER = '\\r\\n' COMPRESSION = AUTO) " 152 | f"PURGE = {copy_options['PURGE']} FORCE = {copy_options['FORCE']} " 153 | f"SIZE_LIMIT={copy_options['SIZE_LIMIT']} ON_ERROR = {copy_options['ON_ERROR']}" 154 | ) 155 | 156 | def _execute_with_attachment(self, sql_statement, stage_path, file_type): 157 | start_time = time.time() 158 | data = self._make_attachment(sql_statement, stage_path, file_type) 159 | url = self.connection.format_url() 160 | 161 | try: 162 | resp_dict = self.connection.do_query(url, data) 163 | self.client_session = resp_dict.get( 164 | "session", self.connection.default_session() 165 | ) 166 | if self._debug: 167 | print( 168 | "upload:_execute_attachment sql=%s %s" 169 | % (sql_statement, time.time() - start_time) 170 | ) 171 | except Exception as e: 172 | log.logger.error( 173 | f"http error on {url}, SQL: {sql_statement} error msg:{str(e)}" 174 | ) 175 | raise 176 | 177 | def _make_attachment(self, sql_statement, stage_path, file_type): 178 | copy_options = {} 179 | copy_options["PURGE"] = self.settings.get("copy_purge", "False") 180 | copy_options["FORCE"] = self.settings.get("force", "False") 181 | copy_options["SIZE_LIMIT"] = self.settings.get("size_limit", "0") 182 | copy_options["ON_ERROR"] = self.settings.get("on_error", "abort") 183 | 184 | file_format_options = {} 185 | file_format_options["type"] = file_type 186 | file_format_options["RECORD_DELIMITER"] = '\r\n' 187 | file_format_options["COMPRESSION"] = "AUTO" 188 | 189 | data = { 190 | "sql": sql_statement, 191 | "stage_attachment": { 192 | "location": stage_path, 193 | "file_format_options": file_format_options, 194 | "copy_options": copy_options, 195 | }, 196 | } 197 | return data 198 | -------------------------------------------------------------------------------- /databend_py/util/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/databendlabs/databend-py/71e69b389ce16b8eb289eca74dfc1cb98341ddb7/databend_py/util/__init__.py -------------------------------------------------------------------------------- /databend_py/util/escape.py: -------------------------------------------------------------------------------- 1 | from datetime import date, datetime 2 | from enum import Enum 3 | from uuid import UUID 4 | 5 | from pytz import timezone 6 | 7 | escape_chars_map = { 8 | "\b": "\\b", 9 | "\f": "\\f", 10 | "\r": "\\r", 11 | "\n": "\\n", 12 | "\t": "\\t", 13 | "\0": "\\0", 14 | "\a": "\\a", 15 | "\v": "\\v", 16 | "\\": "\\\\", 17 | "'": "\\'", 18 | } 19 | 20 | 21 | def escape_datetime(item, context): 22 | server_tz = timezone(context.server_info.timezone) 23 | 24 | if item.tzinfo is not None: 25 | item = item.astimezone(server_tz) 26 | 27 | return "'%s'" % item.strftime("%Y-%m-%d %H:%M:%S") 28 | 29 | 30 | def escape_param(item, context): 31 | if item is None: 32 | return "NULL" 33 | 34 | elif isinstance(item, datetime): 35 | return escape_datetime(item, context) 36 | 37 | elif isinstance(item, date): 38 | return "'%s'" % item.strftime("%Y-%m-%d") 39 | 40 | elif isinstance(item, str): 41 | return "'%s'" % "".join(escape_chars_map.get(c, c) for c in item) 42 | 43 | elif isinstance(item, list): 44 | return "[%s]" % ", ".join(str(escape_param(x, context)) for x in item) 45 | 46 | elif isinstance(item, tuple): 47 | return "(%s)" % ", 
".join(str(escape_param(x, context)) for x in item) 48 | 49 | elif isinstance(item, Enum): 50 | return escape_param(item.value, context) 51 | 52 | elif isinstance(item, UUID): 53 | return "'%s'" % str(item) 54 | 55 | else: 56 | return item 57 | 58 | 59 | def escape_params(params, context): 60 | escaped = {} 61 | 62 | for key, value in params.items(): 63 | escaped[key] = escape_param(value, context) 64 | 65 | return escaped 66 | -------------------------------------------------------------------------------- /databend_py/util/helper.py: -------------------------------------------------------------------------------- 1 | from itertools import islice, tee 2 | from databend_py.errors import ServerException 3 | 4 | 5 | class Helper(object): 6 | def __int__(self, response): 7 | self.response = response 8 | super(Helper, self).__init__() 9 | 10 | def get_result_data(self): 11 | return self.response["data"] 12 | 13 | def get_fields(self): 14 | return self.response["schema"]["fields"] 15 | 16 | def get_next_uri(self): 17 | if "next_uri" in self.response: 18 | return self.response["next_uri"] 19 | return None 20 | 21 | def get_error(self): 22 | if self.response["error"] is None: 23 | return None 24 | 25 | return ServerException( 26 | message=self.response["error"]["message"], 27 | code=self.response["error"].get("code"), 28 | ) 29 | 30 | def check_error(self): 31 | error = self.get_error() 32 | if error: 33 | raise error 34 | 35 | 36 | def chunks(seq, n): 37 | # islice is MUCH slower than slice for lists and tuples. 38 | if isinstance(seq, (list, tuple)): 39 | i = 0 40 | item = seq[i : i + n] 41 | while item: 42 | yield list(item) 43 | i += n 44 | item = seq[i : i + n] 45 | 46 | else: 47 | it = iter(seq) 48 | item = list(islice(it, n)) 49 | while item: 50 | yield item 51 | item = list(islice(it, n)) 52 | 53 | 54 | def pairwise(iterable): 55 | a, b = tee(iterable) 56 | next(b, None) 57 | return zip(a, b) 58 | 59 | 60 | def column_chunks(columns, n): 61 | for column in columns: 62 | if not isinstance(column, (list, tuple)): 63 | raise TypeError( 64 | "Unsupported column type: {}. 
list or tuple is expected.".format( 65 | type(column) 66 | ) 67 | ) 68 | 69 | # create chunk generator for every column 70 | g = [chunks(column, n) for column in columns] 71 | 72 | while True: 73 | # get next chunk for every column 74 | item = [next(column, []) for column in g] 75 | if not any(item): 76 | break 77 | yield item 78 | 79 | 80 | # from paste.deploy.converters 81 | def asbool(obj): 82 | if isinstance(obj, str): 83 | obj = obj.strip().lower() 84 | if obj in ["true", "yes", "on", "y", "t", "1"]: 85 | return True 86 | elif obj in ["false", "no", "off", "n", "f", "0"]: 87 | return False 88 | else: 89 | raise ValueError("String is not true/false: %r" % obj) 90 | return bool(obj) 91 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | minio: 3 | image: docker.io/minio/minio 4 | command: server /data 5 | network_mode: "host" 6 | volumes: 7 | - ./data:/data 8 | databend: 9 | image: docker.io/datafuselabs/databend:nightly 10 | environment: 11 | - QUERY_STORAGE_TYPE=s3 12 | - QUERY_DATABEND_ENTERPRISE_LICENSE 13 | - AWS_S3_ENDPOINT=http://localhost:9000 14 | - AWS_ACCESS_KEY_ID=minioadmin 15 | - AWS_SECRET_ACCESS_KEY=minioadmin 16 | network_mode: "host" 17 | depends_on: 18 | - minio 19 | healthcheck: 20 | test: "curl -f localhost:8080/v1/health || exit 1" 21 | interval: 2s 22 | retries: 10 23 | start_period: 2s 24 | timeout: 1s 25 | -------------------------------------------------------------------------------- /docs/connection.md: -------------------------------------------------------------------------------- 1 | # Databend Python Driver 2 | 3 | ## Connection parameters 4 | 5 | The driver supports various parameters that may be set as URL parameters or as properties passed to Client. Both of the 6 | following examples are equivalent: 7 | 8 | ```python 9 | # URL parameters 10 | client = Client.from_url('http://root@localhost:8000/db?secure=False©_purge=True&debug=True') 11 | 12 | # Client parameters 13 | client = Client( 14 | host='tenant--warehouse.ch.datafusecloud.com', 15 | database="default", 16 | user="user", 17 | port="443", 18 | secure=True, 19 | password="password", settings={"copy_purge": True, "force": True}) 20 | ``` 21 | 22 | ### Parameter References 23 | 24 | | Parameter | Description | Default | example | 25 | |-----------------|----------------------------------------------------------------------------------------------------------|---------|-------------------------------------------------------| 26 | | user | username | root | | 27 | | password | password | None | | | 28 | | port | server port | None | | 29 | | database | selected database | default | 30 | | secure | Enable SSL | false | http://root@localhost:8000/db?secure=False | 31 | | copy_purge | If True, the command will purge the files in the stage after they are loaded successfully into the table | false | http://root@localhost:8000/db?copy_purge=False | 32 | | debug | Enable debug log | False | http://root@localhost:8000/db?debug=True | 33 | | persist_cookies | if using cookies set by server to perform following requests. 
| False | http://root@localhost:8000/db?persist_cookies=True | 34 | | null_to_none | if the result data NULL which is of type str, change it to NoneType | False | http://root@localhost:8000/db?null_to_none=True | 35 | | connect_timeout | timeout seconds when connect to databend | 20 | http://root:root@localhost:8000/db?connect_timeout=30 | 36 | | read_timeout | timeout seconds when read from server | 20 | http://root:root@localhost:8000/db?read_timeout=30 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /examples/batch_insert.py: -------------------------------------------------------------------------------- 1 | from databend_py import Client 2 | 3 | 4 | def insert(): 5 | client = Client.from_url("http://root:root@localhost:8000") 6 | client.execute("DROP TABLE IF EXISTS test_upload") 7 | client.execute("CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)") 8 | client.execute("DESC test_upload") 9 | client.insert("default", "test_upload", [(1, "a"), (1, "b")]) 10 | _, upload_res = client.execute("select * from test_upload") 11 | # upload_res is [(1, 'a'), (1, 'b')] 12 | 13 | 14 | def batch_insert(): 15 | c = Client.from_url("http://root:root@localhost:8000") 16 | c.execute("DROP TABLE IF EXISTS test") 17 | c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") 18 | c.execute("DESC test") 19 | _, r1 = c.execute("INSERT INTO test (x,y) VALUES (%,%)", [1, "yy", 2, "xx"]) 20 | _, ss = c.execute("select * from test") 21 | # ss is [(1, 'yy'), (2, 'xx')] 22 | 23 | 24 | def batch_insert_with_tuple(): 25 | c = Client.from_url("http://root:root@localhost:8000") 26 | c.execute("DROP TABLE IF EXISTS test") 27 | c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") 28 | c.execute("DESC test") 29 | # data is tuple list 30 | _, r1 = c.execute("INSERT INTO test (x,y) VALUES", [(3, "aa"), (4, "bb")]) 31 | _, ss = c.execute("select * from test") 32 | -------------------------------------------------------------------------------- /examples/iter_query.py: -------------------------------------------------------------------------------- 1 | from databend_py import Client 2 | 3 | 4 | def iter_query(): 5 | client = Client.from_url("http://root:root@localhost:8000") 6 | result = client.execute_iter("select 1, 2, 3 from numbers(3)", with_column_types=False) 7 | result_list = [i for i in result] 8 | # result_list is [(1, 2, 3), (1, 2, 3), (1, 2, 3)] 9 | print(result_list) 10 | -------------------------------------------------------------------------------- /examples/ordinary_query.py: -------------------------------------------------------------------------------- 1 | from databend_py import Client 2 | 3 | 4 | def ordinary_query(): 5 | client = Client.from_url("http://root:root@localhost:8000") 6 | _, res = client.execute("select 1", with_column_types=False) 7 | # res is [(1,)] 8 | 9 | column_type, res2 = client.execute("select 1", with_column_types=True) 10 | # column_type is [('1', 'UInt8')] 11 | # res2 [(1,)] 12 | print(column_type) 13 | print(res2) 14 | 15 | # create table/ drop table 16 | client.execute("DROP TABLE IF EXISTS test") 17 | client.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") 18 | -------------------------------------------------------------------------------- /examples/replace_into.py: -------------------------------------------------------------------------------- 1 | from databend_py import Client 2 | 3 | 4 | def replace_into(): 5 | client = Client.from_url("http://root:root@localhost:8000") 6 | 
client.execute("DROP TABLE IF EXISTS test_replace") 7 | client.execute("CREATE TABLE if not exists test_replace (x Int32,y VARCHAR)") 8 | client.execute("DESC test_replace") 9 | client.replace("default", "test_replace", ["x"], [(1, "a"), (2, "b")]) 10 | client.replace("default", "test_replace", ["x"], [(1, "c"), (2, "d")]) 11 | _, upload_res = client.execute("select * from test_replace") 12 | # upload_res is [(1, 'c\r'), (2, 'd\r')] 13 | -------------------------------------------------------------------------------- /examples/session_setting.py: -------------------------------------------------------------------------------- 1 | from databend_py import Client 2 | 3 | 4 | def session_settings(): 5 | # docs: https://databend.rs/doc/integrations/api/rest#client-side-session 6 | session_settings = {"db": "test"} 7 | client = Client( 8 | host="localhost", 9 | port=8000, 10 | user="root", 11 | password="root", 12 | session_settings=session_settings, 13 | ) 14 | print(client) 15 | -------------------------------------------------------------------------------- /examples/upload_to_stage.py: -------------------------------------------------------------------------------- 1 | from databend_py import Client 2 | import os 3 | 4 | 5 | def create_csv(): 6 | import csv 7 | 8 | with open("upload.csv", "w", newline="") as file: 9 | writer = csv.writer(file) 10 | writer.writerow([1, "a"]) 11 | writer.writerow([1, "b"]) 12 | 13 | 14 | def upload_to_stage(): 15 | client = Client.from_url("http://root:root@localhost:8000") 16 | # upload [(1, 'a'), (1, 'b')] as csv to stage ~ 17 | stage_path = client.upload_to_stage("@~", "upload.csv", [(1, "a"), (1, "b")]) 18 | print(stage_path) 19 | # stage_path is @~/upload.csv 20 | 21 | 22 | def upload_file_to_stage(): 23 | create_csv() 24 | client = Client.from_url("http://root:root@localhost:8000") 25 | with open("upload.csv", "rb") as f: 26 | stage_path = client.upload_to_stage("@~", "upload.csv", f) 27 | print(stage_path) 28 | 29 | os.remove("upload.csv") 30 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "databend-py" 3 | version = "0.6.3" 4 | description = "Add your description here" 5 | readme = "README.md" 6 | authors = [ 7 | {name = "Databend Cloud Team"} 8 | ] 9 | license = {file = "LICENSE"} 10 | requires-python = ">=3.9" 11 | dependencies = [ 12 | "black>=24.10.0", 13 | "databend-driver>=0.23.2", 14 | "environs>=11.2.1", 15 | "pytz>=2024.2", 16 | "requests>=2.32.3", 17 | "setuptools>=75.6.0", 18 | ] 19 | 20 | classifiers = [ 21 | "Development Status :: 4 - Beta", 22 | "Environment :: Console", 23 | "Intended Audience :: Developers", 24 | "Intended Audience :: Information Technology", 25 | "Operating System :: OS Independent", 26 | "Programming Language :: SQL", 27 | "Programming Language :: Python :: 3.9", 28 | "Programming Language :: Python :: 3.10", 29 | "Programming Language :: Python :: 3.11", 30 | "Programming Language :: Python :: 3.12", 31 | "Programming Language :: Python :: Implementation :: PyPy", 32 | "Topic :: Database", 33 | "Topic :: Software Development", 34 | "Topic :: Software Development :: Libraries", 35 | "Topic :: Software Development :: Libraries :: Application Frameworks", 36 | "Topic :: Software Development :: Libraries :: Python Modules", 37 | "Topic :: Scientific/Engineering :: Information Analysis" 38 | ] 39 | 40 | [tool.uv] 41 | dev-dependencies = [ 42 | "pytest>=8.3.4", 43 | 
"requests>=2.32.3", 44 | "ruff>=0.8.2", 45 | ] 46 | 47 | [build-system] 48 | requires = ["setuptools>=42", "wheel"] 49 | build-backend = "setuptools.build_meta" 50 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [db] 2 | host=localhost 3 | port=8081 4 | database=books 5 | user=root 6 | password= 7 | 8 | [log] 9 | level=ERROR 10 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from codecs import open 3 | 4 | from setuptools import setup, find_packages 5 | 6 | here = os.path.abspath(os.path.dirname(__file__)) 7 | 8 | 9 | def read_version(): 10 | version_py = os.path.join(here, "databend_py", "VERSION") 11 | with open(version_py, encoding="utf-8") as f: 12 | first_line = f.readline() 13 | return first_line.strip() 14 | 15 | 16 | github_url = "https://github.com/databendcloud/databend-py" 17 | 18 | with open(os.path.join(here, "README.md"), encoding="utf-8") as f: 19 | long_description = f.read() 20 | 21 | setup( 22 | name="databend-py", 23 | version=read_version(), 24 | include_package_data=True, 25 | description="Python driver with native interface for Databend", 26 | long_description=long_description, 27 | url=github_url, 28 | packages=find_packages(".", exclude=["tests*"]), 29 | python_requires=">=3.4, <4", 30 | install_requires=[ 31 | "pytz", 32 | "environs", 33 | "requests", 34 | "databend-driver>=0.11.3", 35 | ], 36 | author="Databend Cloud Team", 37 | author_email="hantmac@outlook.com", 38 | license="Apache License", 39 | classifiers=[ 40 | "Development Status :: 4 - Beta", 41 | "Environment :: Console", 42 | "Intended Audience :: Developers", 43 | "Intended Audience :: Information Technology", 44 | "Operating System :: OS Independent", 45 | "Programming Language :: SQL", 46 | "Programming Language :: Python :: 3", 47 | "Programming Language :: Python :: 3.5", 48 | "Programming Language :: Python :: 3.6", 49 | "Programming Language :: Python :: 3.7", 50 | "Programming Language :: Python :: 3.8", 51 | "Programming Language :: Python :: 3.9", 52 | "Programming Language :: Python :: 3.10", 53 | "Programming Language :: Python :: Implementation :: PyPy", 54 | "Topic :: Database", 55 | "Topic :: Software Development", 56 | "Topic :: Software Development :: Libraries", 57 | "Topic :: Software Development :: Libraries :: Application Frameworks", 58 | "Topic :: Software Development :: Libraries :: Python Modules", 59 | "Topic :: Scientific/Engineering :: Information Analysis", 60 | ], 61 | keywords="databend db database cloud analytics", 62 | test_suite="pytest", 63 | ) 64 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/databendlabs/databend-py/71e69b389ce16b8eb289eca74dfc1cb98341ddb7/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_client.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import types 4 | from databend_py import Client 5 | 6 | 7 | def sample_insert_data(): 8 | return [(1, "a"), (1, "b")] 9 | 10 | 11 | def create_csv(): 12 | import csv 13 | 14 | with open("upload.csv", "w", newline="") as file: 15 | writer = 
csv.writer(file) 16 | writer.writerow([1, "a"]) 17 | writer.writerow([1, "b"]) 18 | 19 | 20 | class DatabendPyTestCase(unittest.TestCase): 21 | databend_url = None 22 | 23 | def setUp(self): 24 | self.databend_url = os.getenv("TEST_DATABEND_DSN") 25 | 26 | def assertHostsEqual(self, client, another, msg=None): 27 | self.assertEqual(client.connection.host, another, msg=msg) 28 | 29 | def test_simple(self): 30 | c = Client.from_url( 31 | "https://app.databend.com:443?secure=True©_purge=True&debug=True" 32 | ) 33 | 34 | self.assertHostsEqual(c, "app.databend.com") 35 | self.assertEqual(c.connection.database, "default") 36 | self.assertEqual(c.connection.user, "root") 37 | self.assertEqual(c.connection.copy_purge, True) 38 | self.assertEqual(c.settings.get("debug"), True) 39 | 40 | c = Client.from_url("https://host:443/db") 41 | 42 | self.assertHostsEqual(c, "host") 43 | self.assertEqual(c.connection.database, "db") 44 | self.assertEqual(c.connection.password, "") 45 | 46 | c = Client.from_url("databend://localhost:8000/default?secure=true") 47 | self.assertEqual(c.connection.schema, "https") 48 | c = Client.from_url("databend://root:root@localhost:8000/default") 49 | self.assertEqual(c.connection.schema, "http") 50 | c = Client.from_url("databend://root:root@localhost:8000/default?secure=false") 51 | self.assertEqual(c.connection.schema, "http") 52 | c = Client.from_url("databend://root:root@localhost:8000/default?compress=True") 53 | self.assertEqual(c._uploader._compress, True) 54 | self.assertEqual(c.connection.connect_timeout, 180) 55 | self.assertEqual(c.connection.read_timeout, 180) 56 | 57 | c = Client.from_url( 58 | "databend://root:root@localhost:8000/default?connect_timeout=30&read_timeout=30" 59 | ) 60 | self.assertEqual(c.connection.connect_timeout, 30) 61 | self.assertEqual(c.connection.read_timeout, 30) 62 | 63 | self.assertEqual(c.connection.persist_cookies, False) 64 | c = Client.from_url( 65 | "https://root:root@localhost:8000?persist_cookies=True&tenant=tn1&warehouse=wh1" 66 | ) 67 | self.assertEqual(c.connection.persist_cookies, True) 68 | self.assertEqual(c.connection.tenant, "tn1") 69 | self.assertEqual(c.connection.warehouse, "wh1") 70 | 71 | def test_session_settings(self): 72 | session_settings = {"db": "database"} 73 | c = Client( 74 | host="localhost", 75 | port=8000, 76 | user="root", 77 | password="root", 78 | session_settings={"db": "database"}, 79 | ) 80 | self.assertEqual(c.connection.client_session, session_settings) 81 | 82 | def test_ordinary_query(self): 83 | select_test = """ 84 | select 85 | null as db, 86 | name as name, 87 | database as schema, 88 | if(engine = 'VIEW', 'view', 'table') as type 89 | from system.tables 90 | where database = 'default'; 91 | """ 92 | # if use the host from databend cloud, must set the 'ADDITIONAL_HEADERS': 93 | # os.environ['ADDITIONAL_HEADERS'] = 'X-DATABENDCLOUD-TENANT=TENANT,X-DATABENDCLOUD-WAREHOUSE=WAREHOUSE' 94 | c = Client.from_url(self.databend_url) 95 | _, r = c.execute("select 1", with_column_types=False) 96 | self.assertEqual(r, ([(1,)])) 97 | column_types, _ = c.execute(select_test, with_column_types=True) 98 | print(column_types) 99 | self.assertEqual( 100 | column_types, 101 | [ 102 | ("db", "NULL"), 103 | ("name", "String"), 104 | ("schema", "String"), 105 | ("type", "String"), 106 | ], 107 | ) 108 | 109 | # test with_column_types=True 110 | r = c.execute("select 1", with_column_types=True) 111 | self.assertEqual(r, ([("1", "UInt8")], [(1,)])) 112 | 113 | def test_batch_insert(self): 114 | c = 
Client.from_url(self.databend_url) 115 | c.execute("DROP TABLE IF EXISTS test") 116 | c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") 117 | c.execute("DESC test") 118 | _, r1 = c.execute("INSERT INTO test (x,y) VALUES (%,%)", [1, "yy", 2, "xx"]) 119 | self.assertEqual(r1, 2) 120 | _, ss = c.execute("select * from test") 121 | print(ss) 122 | self.assertEqual(ss, [(1, "yy"), (2, "xx")]) 123 | 124 | def test_batch_insert_with_tuple(self): 125 | c = Client.from_url(self.databend_url) 126 | c.execute("DROP TABLE IF EXISTS test") 127 | c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") 128 | c.execute("DESC test") 129 | _, r1 = c.execute("INSERT INTO test (x,y) VALUES", [(3, "aa"), (4, "bb")]) 130 | self.assertEqual(r1, 2) 131 | _, ss = c.execute("select * from test") 132 | self.assertEqual(ss, [(3, "aa"), (4, "bb")]) 133 | 134 | def test_batch_insert_with_dict_list(self): 135 | c = Client.from_url(self.databend_url) 136 | c.execute("DROP TABLE IF EXISTS test") 137 | c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") 138 | c.execute("DESC test") 139 | _, r1 = c.execute("INSERT INTO test (x,y) VALUES", [{"x": 5, "y": "cc"}, {"x": 6, "y": "dd"}]) 140 | self.assertEqual(r1, 2) 141 | _, ss = c.execute("select * from test") 142 | self.assertEqual(ss, [(5, "cc"), (6, "dd")]) 143 | 144 | def test_batch_insert_with_dict_multi_fields(self): 145 | c = Client.from_url(self.databend_url) 146 | c.execute("DROP TABLE IF EXISTS test") 147 | c.execute("CREATE TABLE if not exists test (id int, x Int32, y VARCHAR, z Int32)") 148 | c.execute("DESC test") 149 | _, r1 = c.execute("INSERT INTO test (x,y) VALUES", [{"x": 7, "y": "ee"}, {"x": 8, "y": "ff"}]) 150 | self.assertEqual(r1, 2) 151 | _, ss = c.execute("select * from test") 152 | self.assertEqual(ss, [('NULL', 7, 'ee', 'NULL'), ('NULL', 8, 'ff', 'NULL')]) 153 | 154 | def test_iter_query(self): 155 | client = Client.from_url(self.databend_url) 156 | result = client.execute_iter("select 1, 2, 3 from numbers(3)", with_column_types=False) 157 | self.assertIsInstance(result, types.GeneratorType) 158 | result_list = [i for i in result] 159 | print(result_list) 160 | self.assertEqual(result_list, [(1, 2, 3), (1, 2, 3), (1, 2, 3)]) 161 | 162 | def test_insert(self): 163 | client = Client.from_url(self.databend_url) 164 | client.execute("DROP TABLE IF EXISTS test_upload") 165 | client.execute("CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)") 166 | client.execute("DESC test_upload") 167 | client.insert("default", "test_upload", [(1, "a"), (1, "b")]) 168 | _, upload_res = client.execute("select * from test_upload") 169 | self.assertEqual(upload_res, [(1, "a"), (1, "b")]) 170 | 171 | def test_replace(self): 172 | client = Client.from_url(self.databend_url) 173 | client.execute("DROP TABLE IF EXISTS test_replace") 174 | client.execute("CREATE TABLE if not exists test_replace (x Int32,y VARCHAR)") 175 | client.execute("DESC test_replace") 176 | client.replace("default", "test_replace", ["x"], [(1, "a"), (2, "b")]) 177 | client.replace("default", "test_replace", ["x"], [(1, "c"), (2, "d")]) 178 | _, upload_res = client.execute("select * from test_replace") 179 | self.assertEqual(upload_res, [(1, "c"), (2, "d")]) 180 | 181 | def test_insert_with_compress(self): 182 | client = Client.from_url(self.databend_url + "?compress=True&debug=True") 183 | self.assertEqual(client._uploader._compress, True) 184 | client.execute("DROP TABLE IF EXISTS test_upload") 185 | client.execute("CREATE TABLE if not exists test_upload (x 
Int32,y VARCHAR)") 186 | client.execute("DESC test_upload") 187 | client.insert("default", "test_upload", [(1, "a"), (1, "b")]) 188 | _, upload_res = client.execute("select * from test_upload") 189 | self.assertEqual(upload_res, [(1, "a"), (1, "b")]) 190 | 191 | def test_upload_to_stage(self): 192 | client = Client.from_url(self.databend_url) 193 | stage_path = client.upload_to_stage("@~", "upload.csv", [(1, "a"), (1, "b")]) 194 | self.assertEqual(stage_path, "@~/upload.csv") 195 | 196 | def test_upload_file_to_stage(self): 197 | create_csv() 198 | client = Client.from_url(self.databend_url) 199 | with open("upload.csv", "rb") as f: 200 | stage_path = client.upload_to_stage("@~", "upload.csv", f) 201 | print(stage_path) 202 | self.assertEqual(stage_path, "@~/upload.csv") 203 | 204 | os.remove("upload.csv") 205 | 206 | def test_select_over_paging(self): 207 | expected_column = [("number", "UInt64")] 208 | client = Client.from_url(self.databend_url) 209 | columns, data = client.execute( 210 | "SELECT * FROM numbers(10001)", with_column_types=True 211 | ) 212 | self.assertEqual(expected_column, columns) 213 | 214 | def tearDown(self): 215 | client = Client.from_url(self.databend_url) 216 | client.execute("DROP TABLE IF EXISTS test") 217 | client.disconnect() 218 | 219 | def test_cookies(self): 220 | client = Client.from_url(self.databend_url) 221 | client.execute("select 1") 222 | self.assertIsNone(client.connection.cookies) 223 | 224 | if "?" in self.databend_url: 225 | url_with_persist_cookies = f"{self.databend_url}&persist_cookies=true" 226 | else: 227 | url_with_persist_cookies = f"{self.databend_url}?persist_cookies=true" 228 | client = Client.from_url(url_with_persist_cookies) 229 | client.execute("select 1") 230 | # self.assertIsNotNone(client.connection.cookies) 231 | 232 | def test_null_to_none(self): 233 | client = Client.from_url(self.databend_url) 234 | _, data = client.execute("select NULL as test") 235 | self.assertEqual(data[0][0], "NULL") 236 | 237 | if "?" 
in self.databend_url: 238 | url_with_null_to_none = f"{self.databend_url}&null_to_none=true" 239 | else: 240 | url_with_null_to_none = f"{self.databend_url}?null_to_none=true" 241 | client = Client.from_url(url_with_null_to_none) 242 | _, data = client.execute("select NULL as test") 243 | self.assertIsNone(data[0][0]) 244 | 245 | def test_special_chars(self): 246 | client = Client.from_url(self.databend_url) 247 | client.execute("create or replace table test_special_chars (x string)") 248 | client.execute("INSERT INTO test_special_chars (x) VALUES", [("ó")]) 249 | _, data = client.execute("select * from test_special_chars") 250 | self.assertEqual(data, [("ó",)]) 251 | 252 | def test_set_query_id_header(self): 253 | os.environ["ADDITIONAL_HEADERS"] = ( 254 | "X-DATABENDCLOUD-TENANT=TENANT,X-DATABENDCLOUD-WAREHOUSE=WAREHOUSE" 255 | ) 256 | client = Client.from_url(self.databend_url) 257 | self.assertEqual( 258 | "X-DATABENDCLOUD-TENANT" in client.connection.additional_headers, True 259 | ) 260 | self.assertEqual( 261 | client.connection.additional_headers["X-DATABENDCLOUD-TENANT"], "TENANT" 262 | ) 263 | client.execute("select 1") 264 | execute_query_id1 = client.connection.additional_headers["X-DATABEND-QUERY-ID"] 265 | self.assertEqual( 266 | "X-DATABEND-QUERY-ID" in client.connection.additional_headers, True 267 | ) 268 | client.execute("select 2") 269 | self.assertNotEqual( 270 | execute_query_id1, 271 | client.connection.additional_headers["X-DATABEND-QUERY-ID"], 272 | ) 273 | 274 | def test_commit(self): 275 | client = Client.from_url(self.databend_url) 276 | client.execute("create or replace table test_commit (x int)") 277 | client.begin() 278 | client.execute("insert into test_commit values (1)") 279 | _, data = client.execute("select * from test_commit") 280 | self.assertEqual(data, [(1,)]) 281 | 282 | client2 = Client.from_url(self.databend_url) 283 | client2.begin() 284 | client2.execute("insert into test_commit values (2)") 285 | _, data = client2.execute("select * from test_commit") 286 | self.assertEqual(data, [(2,)]) 287 | 288 | client.commit() 289 | _, data = client.execute("select * from test_commit") 290 | self.assertEqual(data, [(1,)]) 291 | 292 | def test_rollback(self): 293 | client = Client.from_url(self.databend_url) 294 | client.execute("create or replace table test_rollback (x int)") 295 | client.begin() 296 | client.execute("insert into test_rollback values (1)") 297 | _, data = client.execute("select * from test_rollback") 298 | self.assertEqual(data, [(1,)]) 299 | 300 | client2 = Client.from_url(self.databend_url) 301 | client2.begin() 302 | client2.execute("insert into test_rollback values (2)") 303 | _, data = client2.execute("select * from test_rollback") 304 | self.assertEqual(data, [(2,)]) 305 | 306 | client.rollback() 307 | _, data = client.execute("select * from test_rollback") 308 | self.assertEqual(data, []) 309 | 310 | def test_cast_bool(self): 311 | client = Client.from_url(self.databend_url) 312 | _, data = client.execute("select 'False'::boolean union select 'True'::boolean") 313 | self.assertEqual(len(data), 2) 314 | 315 | def test_temp_table(self): 316 | client = Client.from_url(self.databend_url) 317 | client.execute("create temp table t1(a int)") 318 | client.execute("insert into t1 values (1)") 319 | _, data = client.execute("select * from t1") 320 | self.assertEqual(data, [(1,)]) 321 | client.execute("drop table t1") 322 | 323 | 324 | if __name__ == "__main__": 325 | unittest.main() 326 | 
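# Example invocation (hypothetical; assumes a Databend server reachable at this DSN,
# which setUp() reads from the TEST_DATABEND_DSN environment variable):
#   TEST_DATABEND_DSN="http://root:root@localhost:8000/default" python -m pytest tests/test_client.py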
-------------------------------------------------------------------------------- /tests/test_simple.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | 4 | class Dict(dict): 5 | def __init__(self, **kw): 6 | super().__init__(**kw) 7 | 8 | def __getattr__(self, key): 9 | try: 10 | return self[key] 11 | except KeyError: 12 | raise AttributeError(r"'Dict' object has no attribute '%s'" % key) 13 | 14 | def __setattr__(self, key, value): 15 | self[key] = value 16 | 17 | 18 | class TestDict(unittest.TestCase): 19 | databend_url = None 20 | 21 | @classmethod 22 | def setUpClass(cls): 23 | cls.databend_url = "test_url" 24 | 25 | def test_init(self): 26 | d = Dict(a=1, b="test") 27 | self.assertEqual(self.databend_url, "test_url") 28 | self.assertEqual(d.a, 1) 29 | self.assertEqual(d.b, "test") 30 | self.assertTrue(isinstance(d, dict)) 31 | 32 | def test_key(self): 33 | d = Dict() 34 | d["key"] = "value" 35 | self.assertEqual(d.key, "value") 36 | 37 | 38 | if __name__ == "__main__": 39 | unittest.main() 40 | -------------------------------------------------------------------------------- /uv.lock: -------------------------------------------------------------------------------- 1 | version = 1 2 | requires-python = ">=3.9" 3 | 4 | [[package]] 5 | name = "black" 6 | version = "24.10.0" 7 | source = { registry = "https://pypi.org/simple" } 8 | dependencies = [ 9 | { name = "click" }, 10 | { name = "mypy-extensions" }, 11 | { name = "packaging" }, 12 | { name = "pathspec" }, 13 | { name = "platformdirs" }, 14 | { name = "tomli", marker = "python_full_version < '3.11'" }, 15 | { name = "typing-extensions", marker = "python_full_version < '3.11'" }, 16 | ] 17 | sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } 18 | wheels = [ 19 | { url = "https://files.pythonhosted.org/packages/a3/f3/465c0eb5cddf7dbbfe1fecd9b875d1dcf51b88923cd2c1d7e9ab95c6336b/black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812", size = 1623211 }, 20 | { url = "https://files.pythonhosted.org/packages/df/57/b6d2da7d200773fdfcc224ffb87052cf283cec4d7102fab450b4a05996d8/black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea", size = 1457139 }, 21 | { url = "https://files.pythonhosted.org/packages/6e/c5/9023b7673904a5188f9be81f5e129fff69f51f5515655fbd1d5a4e80a47b/black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f", size = 1753774 }, 22 | { url = "https://files.pythonhosted.org/packages/e1/32/df7f18bd0e724e0d9748829765455d6643ec847b3f87e77456fc99d0edab/black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e", size = 1414209 }, 23 | { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, 24 | { url = 
"https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, 25 | { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, 26 | { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, 27 | { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, 28 | { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, 29 | { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, 30 | { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, 31 | { url = "https://files.pythonhosted.org/packages/d0/a0/a993f58d4ecfba035e61fca4e9f64a2ecae838fc9f33ab798c62173ed75c/black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981", size = 1643986 }, 32 | { url = "https://files.pythonhosted.org/packages/37/d5/602d0ef5dfcace3fb4f79c436762f130abd9ee8d950fa2abdbf8bbc555e0/black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b", size = 1448085 }, 33 | { url = "https://files.pythonhosted.org/packages/47/6d/a3a239e938960df1a662b93d6230d4f3e9b4a22982d060fc38c42f45a56b/black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2", size = 1760928 }, 34 | { url = "https://files.pythonhosted.org/packages/dd/cf/af018e13b0eddfb434df4d9cd1b2b7892bab119f7a20123e93f6910982e8/black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b", size = 1436875 }, 35 | { url = "https://files.pythonhosted.org/packages/fe/02/f408c804e0ee78c367dcea0a01aedde4f1712af93b8b6e60df981e0228c7/black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd", size = 1622516 }, 36 | { url = "https://files.pythonhosted.org/packages/f8/b9/9b706ed2f55bfb28b436225a9c57da35990c9005b90b8c91f03924454ad7/black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f", size = 1456181 }, 37 | { url = "https://files.pythonhosted.org/packages/0a/1c/314d7f17434a5375682ad097f6f4cc0e3f414f3c95a9b1bb4df14a0f11f9/black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800", size = 1752801 }, 38 | { url = "https://files.pythonhosted.org/packages/39/a7/20e5cd9237d28ad0b31438de5d9f01c8b99814576f4c0cda1edd62caf4b0/black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7", size = 1413626 }, 39 | { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, 40 | ] 41 | 42 | [[package]] 43 | name = "certifi" 44 | version = "2024.8.30" 45 | source = { registry = "https://pypi.org/simple" } 46 | sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } 47 | wheels = [ 48 | { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, 49 | ] 50 | 51 | [[package]] 52 | name = "charset-normalizer" 53 | version = "3.4.0" 54 | source = { registry = "https://pypi.org/simple" } 55 | sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } 56 | wheels = [ 57 | { url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363 }, 58 | { url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639 }, 59 | { url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451 }, 60 | { url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041 }, 61 | { url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333 }, 62 | { url = 
"https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921 }, 63 | { url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", size = 144785 }, 64 | { url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631 }, 65 | { url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867 }, 66 | { url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273 }, 67 | { url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437 }, 68 | { url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087 }, 69 | { url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142 }, 70 | { url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701 }, 71 | { url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191 }, 72 | { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 }, 73 | { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 }, 74 | { url = 
"https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 }, 75 | { url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 }, 76 | { url = "https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 }, 77 | { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 }, 78 | { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585 }, 79 | { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 }, 80 | { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 }, 81 | { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 }, 82 | { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 }, 83 | { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 }, 84 | { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 }, 85 | { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 }, 86 | { url = 
"https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 }, 87 | { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, 88 | { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, 89 | { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, 90 | { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, 91 | { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, 92 | { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, 93 | { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, 94 | { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, 95 | { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, 96 | { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, 97 | { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, 98 | { url = 
"https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, 99 | { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, 100 | { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, 101 | { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, 102 | { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, 103 | { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, 104 | { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, 105 | { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, 106 | { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, 107 | { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, 108 | { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, 109 | { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, 110 | { url = 
"https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, 111 | { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, 112 | { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, 113 | { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, 114 | { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, 115 | { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, 116 | { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, 117 | { url = "https://files.pythonhosted.org/packages/54/2f/28659eee7f5d003e0f5a3b572765bf76d6e0fe6601ab1f1b1dd4cba7e4f1/charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa", size = 196326 }, 118 | { url = "https://files.pythonhosted.org/packages/d1/18/92869d5c0057baa973a3ee2af71573be7b084b3c3d428fe6463ce71167f8/charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a", size = 125614 }, 119 | { url = "https://files.pythonhosted.org/packages/d6/27/327904c5a54a7796bb9f36810ec4173d2df5d88b401d2b95ef53111d214e/charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0", size = 120450 }, 120 | { url = "https://files.pythonhosted.org/packages/a4/23/65af317914a0308495133b2d654cf67b11bbd6ca16637c4e8a38f80a5a69/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a", size = 140135 }, 121 | { url = "https://files.pythonhosted.org/packages/f2/41/6190102ad521a8aa888519bb014a74251ac4586cde9b38e790901684f9ab/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242", size = 150413 }, 122 | { url = 
"https://files.pythonhosted.org/packages/7b/ab/f47b0159a69eab9bd915591106859f49670c75f9a19082505ff16f50efc0/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b", size = 142992 }, 123 | { url = "https://files.pythonhosted.org/packages/28/89/60f51ad71f63aaaa7e51a2a2ad37919985a341a1d267070f212cdf6c2d22/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62", size = 144871 }, 124 | { url = "https://files.pythonhosted.org/packages/0c/48/0050550275fea585a6e24460b42465020b53375017d8596c96be57bfabca/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0", size = 146756 }, 125 | { url = "https://files.pythonhosted.org/packages/dc/b5/47f8ee91455946f745e6c9ddbb0f8f50314d2416dd922b213e7d5551ad09/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd", size = 141034 }, 126 | { url = "https://files.pythonhosted.org/packages/84/79/5c731059ebab43e80bf61fa51666b9b18167974b82004f18c76378ed31a3/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be", size = 149434 }, 127 | { url = "https://files.pythonhosted.org/packages/ca/f3/0719cd09fc4dc42066f239cb3c48ced17fc3316afca3e2a30a4756fe49ab/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d", size = 152443 }, 128 | { url = "https://files.pythonhosted.org/packages/f7/0e/c6357297f1157c8e8227ff337e93fd0a90e498e3d6ab96b2782204ecae48/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3", size = 150294 }, 129 | { url = "https://files.pythonhosted.org/packages/54/9a/acfa96dc4ea8c928040b15822b59d0863d6e1757fba8bd7de3dc4f761c13/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742", size = 145314 }, 130 | { url = "https://files.pythonhosted.org/packages/73/1c/b10a63032eaebb8d7bcb8544f12f063f41f5f463778ac61da15d9985e8b6/charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2", size = 94724 }, 131 | { url = "https://files.pythonhosted.org/packages/c5/77/3a78bf28bfaa0863f9cfef278dbeadf55efe064eafff8c7c424ae3c4c1bf/charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca", size = 102159 }, 132 | { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, 133 | ] 134 | 135 | [[package]] 136 | name = "click" 137 | version = "8.1.7" 138 | source = { registry = "https://pypi.org/simple" } 139 | dependencies = [ 140 | { name = "colorama", marker = "platform_system == 'Windows'" }, 141 | ] 142 | sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = 
"sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } 143 | wheels = [ 144 | { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, 145 | ] 146 | 147 | [[package]] 148 | name = "colorama" 149 | version = "0.4.6" 150 | source = { registry = "https://pypi.org/simple" } 151 | sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } 152 | wheels = [ 153 | { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, 154 | ] 155 | 156 | [[package]] 157 | name = "databend-driver" 158 | version = "0.23.2" 159 | source = { registry = "https://pypi.org/simple" } 160 | wheels = [ 161 | { url = "https://files.pythonhosted.org/packages/0b/1c/a5e8e63e47484ac701c42ef5a7cc2a7e122249fc9f5b3bcc07e97c67c03b/databend_driver-0.23.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:58c2df0bbb7e7e46e991f0338a606df483fa5e56ac523fedfde1fa7934bc72f2", size = 5639659 }, 162 | { url = "https://files.pythonhosted.org/packages/75/5a/a6601f10559618b4ef9a62a7e0fb870e6bae313cc508ed8e86d2a4571a35/databend_driver-0.23.2-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:887d0b3ad3d8c999bbbef546dcd7c47c96f6c93e346344b65f449ca0d0177068", size = 5269657 }, 163 | { url = "https://files.pythonhosted.org/packages/df/bb/27d79a2fb52f3d08982502ca743e68da1d5e22fe0f3deafbaadf59ab7b15/databend_driver-0.23.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dc4efa61f72ae82b2c422f194ec87b3eccaf31517d93c8df164980aed15da52", size = 6218513 }, 164 | { url = "https://files.pythonhosted.org/packages/8e/38/b5ca980b1d6c08efe9019408289332ada3a08ce2b8d8fbf166aa42c7de87/databend_driver-0.23.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:961a194fe7d7e4f94041fc1c75df71298c7a6eae0d57a039273ed90da996b6dd", size = 5995034 }, 165 | { url = "https://files.pythonhosted.org/packages/32/8b/9da9c9aa61d9d688192d00b20fcc598cb0e768ceff7457a7567fa3130f33/databend_driver-0.23.2-cp37-abi3-win_amd64.whl", hash = "sha256:2de67cf61d804bf665ca0ce3bfc64491e58e505678499008f73e570a92cab37b", size = 5090985 }, 166 | ] 167 | 168 | [[package]] 169 | name = "databend-py" 170 | version = "0.1.0" 171 | source = { editable = "." 
} 172 | dependencies = [ 173 | { name = "black" }, 174 | { name = "databend-driver" }, 175 | { name = "environs" }, 176 | { name = "pytz" }, 177 | { name = "requests" }, 178 | { name = "setuptools" }, 179 | ] 180 | 181 | [package.dev-dependencies] 182 | dev = [ 183 | { name = "pytest" }, 184 | { name = "requests" }, 185 | { name = "ruff" }, 186 | ] 187 | 188 | [package.metadata] 189 | requires-dist = [ 190 | { name = "black", specifier = ">=24.10.0" }, 191 | { name = "databend-driver", specifier = ">=0.23.2" }, 192 | { name = "environs", specifier = ">=11.2.1" }, 193 | { name = "pytz", specifier = ">=2024.2" }, 194 | { name = "requests", specifier = ">=2.32.3" }, 195 | { name = "setuptools", specifier = ">=75.6.0" }, 196 | ] 197 | 198 | [package.metadata.requires-dev] 199 | dev = [ 200 | { name = "pytest", specifier = ">=8.3.4" }, 201 | { name = "requests", specifier = ">=2.32.3" }, 202 | { name = "ruff", specifier = ">=0.8.2" }, 203 | ] 204 | 205 | [[package]] 206 | name = "environs" 207 | version = "11.2.1" 208 | source = { registry = "https://pypi.org/simple" } 209 | dependencies = [ 210 | { name = "marshmallow" }, 211 | { name = "python-dotenv" }, 212 | ] 213 | sdist = { url = "https://files.pythonhosted.org/packages/77/08/2b7d9cacf2b27482c9218ee6762336aa47bdb9d07ee26a136d072a328297/environs-11.2.1.tar.gz", hash = "sha256:e068ae3174cef52ba4b95ead22e639056a02465f616e62323e04ae08e86a75a4", size = 27485 } 214 | wheels = [ 215 | { url = "https://files.pythonhosted.org/packages/1a/21/1e0d8de234e9d0c675ea8fd50f9e7ad66fae32c207bc982f1d14f7c0835b/environs-11.2.1-py3-none-any.whl", hash = "sha256:9d2080cf25807a26fc0d4301e2d7b62c64fbf547540f21e3a30cc02bc5fbe948", size = 12923 }, 216 | ] 217 | 218 | [[package]] 219 | name = "exceptiongroup" 220 | version = "1.2.2" 221 | source = { registry = "https://pypi.org/simple" } 222 | sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } 223 | wheels = [ 224 | { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, 225 | ] 226 | 227 | [[package]] 228 | name = "idna" 229 | version = "3.10" 230 | source = { registry = "https://pypi.org/simple" } 231 | sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } 232 | wheels = [ 233 | { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, 234 | ] 235 | 236 | [[package]] 237 | name = "iniconfig" 238 | version = "2.0.0" 239 | source = { registry = "https://pypi.org/simple" } 240 | sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } 241 | wheels = [ 242 | { url = 
"https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, 243 | ] 244 | 245 | [[package]] 246 | name = "marshmallow" 247 | version = "3.23.1" 248 | source = { registry = "https://pypi.org/simple" } 249 | dependencies = [ 250 | { name = "packaging" }, 251 | ] 252 | sdist = { url = "https://files.pythonhosted.org/packages/6d/30/14d8609f65c8aeddddd3181c06d2c9582da6278f063b27c910bbf9903441/marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468", size = 177488 } 253 | wheels = [ 254 | { url = "https://files.pythonhosted.org/packages/ac/a7/a78ff54e67ef92a3d12126b98eb98ab8abab3de4a8c46d240c87e514d6bb/marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491", size = 49488 }, 255 | ] 256 | 257 | [[package]] 258 | name = "mypy-extensions" 259 | version = "1.0.0" 260 | source = { registry = "https://pypi.org/simple" } 261 | sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } 262 | wheels = [ 263 | { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, 264 | ] 265 | 266 | [[package]] 267 | name = "packaging" 268 | version = "24.2" 269 | source = { registry = "https://pypi.org/simple" } 270 | sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } 271 | wheels = [ 272 | { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, 273 | ] 274 | 275 | [[package]] 276 | name = "pathspec" 277 | version = "0.12.1" 278 | source = { registry = "https://pypi.org/simple" } 279 | sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } 280 | wheels = [ 281 | { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, 282 | ] 283 | 284 | [[package]] 285 | name = "platformdirs" 286 | version = "4.3.6" 287 | source = { registry = "https://pypi.org/simple" } 288 | sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } 289 | wheels = [ 290 | { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = 
"sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, 291 | ] 292 | 293 | [[package]] 294 | name = "pluggy" 295 | version = "1.5.0" 296 | source = { registry = "https://pypi.org/simple" } 297 | sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } 298 | wheels = [ 299 | { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, 300 | ] 301 | 302 | [[package]] 303 | name = "pytest" 304 | version = "8.3.4" 305 | source = { registry = "https://pypi.org/simple" } 306 | dependencies = [ 307 | { name = "colorama", marker = "sys_platform == 'win32'" }, 308 | { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, 309 | { name = "iniconfig" }, 310 | { name = "packaging" }, 311 | { name = "pluggy" }, 312 | { name = "tomli", marker = "python_full_version < '3.11'" }, 313 | ] 314 | sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } 315 | wheels = [ 316 | { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, 317 | ] 318 | 319 | [[package]] 320 | name = "python-dotenv" 321 | version = "1.0.1" 322 | source = { registry = "https://pypi.org/simple" } 323 | sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } 324 | wheels = [ 325 | { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, 326 | ] 327 | 328 | [[package]] 329 | name = "pytz" 330 | version = "2024.2" 331 | source = { registry = "https://pypi.org/simple" } 332 | sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } 333 | wheels = [ 334 | { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, 335 | ] 336 | 337 | [[package]] 338 | name = "requests" 339 | version = "2.32.3" 340 | source = { registry = "https://pypi.org/simple" } 341 | dependencies = [ 342 | { name = "certifi" }, 343 | { name = "charset-normalizer" }, 344 | { name = "idna" }, 345 | { name = "urllib3" }, 346 | ] 347 | sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = 
"sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } 348 | wheels = [ 349 | { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, 350 | ] 351 | 352 | [[package]] 353 | name = "ruff" 354 | version = "0.8.2" 355 | source = { registry = "https://pypi.org/simple" } 356 | sdist = { url = "https://files.pythonhosted.org/packages/5e/2b/01245f4f3a727d60bebeacd7ee6d22586c7f62380a2597ddb22c2f45d018/ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5", size = 3349020 } 357 | wheels = [ 358 | { url = "https://files.pythonhosted.org/packages/91/29/366be70216dba1731a00a41f2f030822b0c96c7c4f3b2c0cdce15cbace74/ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d", size = 10530649 }, 359 | { url = "https://files.pythonhosted.org/packages/63/82/a733956540bb388f00df5a3e6a02467b16c0e529132625fe44ce4c5fb9c7/ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5", size = 10274069 }, 360 | { url = "https://files.pythonhosted.org/packages/3d/12/0b3aa14d1d71546c988a28e1b412981c1b80c8a1072e977a2f30c595cc4a/ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c", size = 9909400 }, 361 | { url = "https://files.pythonhosted.org/packages/23/08/f9f08cefb7921784c891c4151cce6ed357ff49e84b84978440cffbc87408/ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f", size = 10766782 }, 362 | { url = "https://files.pythonhosted.org/packages/e4/71/bf50c321ec179aa420c8ec40adac5ae9cc408d4d37283a485b19a2331ceb/ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897", size = 10286316 }, 363 | { url = "https://files.pythonhosted.org/packages/f2/83/c82688a2a6117539aea0ce63fdf6c08e60fe0202779361223bcd7f40bd74/ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58", size = 11338270 }, 364 | { url = "https://files.pythonhosted.org/packages/7f/d7/bc6a45e5a22e627640388e703160afb1d77c572b1d0fda8b4349f334fc66/ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29", size = 12058579 }, 365 | { url = "https://files.pythonhosted.org/packages/da/3b/64150c93946ec851e6f1707ff586bb460ca671581380c919698d6a9267dc/ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248", size = 11615172 }, 366 | { url = "https://files.pythonhosted.org/packages/e4/9e/cf12b697ea83cfe92ec4509ae414dc4c9b38179cc681a497031f0d0d9a8e/ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93", size = 12882398 }, 367 | { url = "https://files.pythonhosted.org/packages/a9/27/96d10863accf76a9c97baceac30b0a52d917eb985a8ac058bd4636aeede0/ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d", size = 11176094 }, 368 | { url = "https://files.pythonhosted.org/packages/eb/10/cd2fd77d4a4e7f03c29351be0f53278a393186b540b99df68beb5304fddd/ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0", size = 10771884 }, 369 | { url = "https://files.pythonhosted.org/packages/71/5d/beabb2ff18870fc4add05fa3a69a4cb1b1d2d6f83f3cf3ae5ab0d52f455d/ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa", size = 10382535 }, 370 | { url = "https://files.pythonhosted.org/packages/ae/29/6b3fdf3ad3e35b28d87c25a9ff4c8222ad72485ab783936b2b267250d7a7/ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f", size = 10886995 }, 371 | { url = "https://files.pythonhosted.org/packages/e9/dc/859d889b4d9356a1a2cdbc1e4a0dda94052bc5b5300098647e51a58c430b/ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22", size = 11220750 }, 372 | { url = "https://files.pythonhosted.org/packages/0b/08/e8f519f61f1d624264bfd6b8829e4c5f31c3c61193bc3cff1f19dbe7626a/ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1", size = 8729396 }, 373 | { url = "https://files.pythonhosted.org/packages/f8/d4/ba1c7ab72aba37a2b71fe48ab95b80546dbad7a7f35ea28cf66fc5cea5f6/ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea", size = 9594729 }, 374 | { url = "https://files.pythonhosted.org/packages/23/34/db20e12d3db11b8a2a8874258f0f6d96a9a4d631659d54575840557164c8/ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8", size = 9035131 }, 375 | ] 376 | 377 | [[package]] 378 | name = "setuptools" 379 | version = "75.6.0" 380 | source = { registry = "https://pypi.org/simple" } 381 | sdist = { url = "https://files.pythonhosted.org/packages/43/54/292f26c208734e9a7f067aea4a7e282c080750c4546559b58e2e45413ca0/setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6", size = 1337429 } 382 | wheels = [ 383 | { url = "https://files.pythonhosted.org/packages/55/21/47d163f615df1d30c094f6c8bbb353619274edccf0327b185cc2493c2c33/setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d", size = 1224032 }, 384 | ] 385 | 386 | [[package]] 387 | name = "tomli" 388 | version = "2.2.1" 389 | source = { registry = "https://pypi.org/simple" } 390 | sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } 391 | wheels = [ 392 | { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, 393 | { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, 394 
| { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, 395 | { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, 396 | { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, 397 | { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, 398 | { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, 399 | { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, 400 | { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, 401 | { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, 402 | { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, 403 | { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, 404 | { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, 405 | { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, 406 | { url = 
"https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, 407 | { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, 408 | { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, 409 | { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, 410 | { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, 411 | { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, 412 | { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, 413 | { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, 414 | { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, 415 | { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, 416 | { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, 417 | { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, 418 | { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, 419 | { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, 420 | { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, 421 | { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, 422 | { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, 423 | ] 424 | 425 | [[package]] 426 | name = "typing-extensions" 427 | version = "4.12.2" 428 | source = { registry = "https://pypi.org/simple" } 429 | sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } 430 | wheels = [ 431 | { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, 432 | ] 433 | 434 | [[package]] 435 | name = "urllib3" 436 | version = "2.2.3" 437 | source = { registry = "https://pypi.org/simple" } 438 | sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } 439 | wheels = [ 440 | { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, 441 | ] 442 | --------------------------------------------------------------------------------