├── .dockerignore ├── .github └── workflows │ ├── ci.yml │ └── pypi.yml ├── .gitignore ├── CONTRIBUTING.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── asyncodbc ├── __init__.py ├── connection.py ├── cursor.py ├── log.py ├── pool.py └── utils.py ├── conftest.py ├── docs ├── Makefile ├── conf.py ├── contributing.rst ├── examples.rst ├── glossary.rst ├── index.rst ├── make.bat └── tuning.rst ├── examples ├── example_complex_queries.py ├── example_context_managers.py ├── example_pool.py └── example_simple.py ├── poetry.lock ├── pyproject.toml └── tests ├── test_connection.py ├── test_cursor.py ├── test_pool.py └── test_slow.py /.dockerignore: -------------------------------------------------------------------------------- 1 | venv* 2 | virtualenv* 3 | *.log 4 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches-ignore: 5 | - master 6 | pull_request: 7 | branches-ignore: 8 | - master 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | services: 13 | mssql: 14 | image: mcr.microsoft.com/mssql/server:2019-CU15-ubuntu-20.04 15 | ports: 16 | - 1433:1433 17 | env: 18 | ACCEPT_EULA: Y 19 | SA_PASSWORD: Abcd12345678 20 | options: >- 21 | --health-cmd "/opt/mssql-tools/bin/sqlcmd -U sa -P Abcd12345678 -Q 'select 1' -b -o /dev/null" 22 | --health-interval 10s 23 | --health-timeout 5s 24 | --health-retries 5 25 | env: 26 | TEST_MSSQL_PASS: Abcd12345678 27 | strategy: 28 | matrix: 29 | python-version: [ "3.8", "3.9", "3.10" ] 30 | steps: 31 | - uses: actions/cache@v2 32 | with: 33 | path: ~/.cache/pip 34 | key: ${{ runner.os }}-pip-${{ hashFiles('**/poetry.lock') }} 35 | restore-keys: | 36 | ${{ runner.os }}-pip- 37 | - uses: actions/checkout@v2 38 | - uses: actions/setup-python@v2 39 | with: 40 | python-version: ${{ matrix.python-version }} 41 | - name: Install and configure Poetry 42 | run: | 43 | pip install -U pip poetry 44 | poetry config virtualenvs.create false 45 | - name: Install requirements 46 | run: make deps 47 | - name: Install ODBC driver 48 | run: | 49 | sudo curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - 50 | sudo curl https://packages.microsoft.com/config/ubuntu/$(lsb_release -rs)/prod.list -o /etc/apt/sources.list.d/mssql-release.list 51 | sudo apt-get update 52 | ACCEPT_EULA=Y sudo apt-get install -y msodbcsql18 53 | - name: Run ci 54 | run: make ci 55 | - name: Upload Coverage 56 | run: coveralls --service=github 57 | env: 58 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 59 | COVERALLS_FLAG_NAME: ${{ matrix.python-version }} 60 | COVERALLS_PARALLEL: true 61 | 62 | coveralls: 63 | name: Finish Coveralls 64 | needs: test 65 | runs-on: ubuntu-latest 66 | container: python:3-slim 67 | steps: 68 | - name: Finished 69 | run: | 70 | pip3 install --upgrade coveralls 71 | coveralls --finish 72 | env: 73 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 74 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: pypi 2 | on: 3 | release: 4 | types: 5 | - created 6 | jobs: 7 | publish: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v2 11 | - uses: actions/setup-python@v1 12 | with: 13 | python-version: '3.x' 14 | - name: Install and configure Poetry 15 | run: | 16 | pip install -U pip poetry 17 | poetry config 
virtualenvs.create false 18 | - name: Build dists 19 | run: make build 20 | - name: Pypi Publish 21 | uses: pypa/gh-action-pypi-publish@master 22 | with: 23 | user: __token__ 24 | password: ${{ secrets.pypi_password }} 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ># Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | pyvenv/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .cache 41 | nosetests.xml 42 | coverage.xml 43 | cover 44 | 45 | # Translations 46 | *.mo 47 | *.pot 48 | 49 | # Django stuff: 50 | *.log 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | # PyBuilder 56 | target/ 57 | 58 | # PyCharm 59 | .idea 60 | *.iml 61 | # rope 62 | *.swp 63 | .ropeproject 64 | 65 | # Project 66 | tags 67 | ci/asyncodbc 68 | sqlite.db 69 | 70 | # virtual envs 71 | venv*/ 72 | virtualenv*/ 73 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | Thanks for your interest in contributing to ``asyncodbc``, there are multiple 5 | ways and places you can contribute. 6 | 7 | Reporting an Issue 8 | ------------------ 9 | If you have found issue with `asyncodbc` please do 10 | not hesitate to file an issue on the GitHub_ project. When filing your 11 | issue please make sure you can express the issue with a reproducible test 12 | case. 13 | 14 | When reporting an issue we also need as much information about your environment 15 | that you can include. We never know what information will be pertinent when 16 | trying narrow down the issue. Please include at least the following 17 | information: 18 | 19 | * Version of `asyncodbc` and `python`. 20 | * Version of your ODBC database 21 | * Version of database ODBC driver 22 | * Version of unixODBC_ 23 | * Platform you're running on (OS X, Linux, Windows). 24 | 25 | 26 | Instructions for contributors 27 | ----------------------------- 28 | 29 | 30 | In order to make a clone of the GitHub_ repo: open the link and press the 31 | "Fork" button on the upper-right menu of the web page. 32 | 33 | I hope everybody knows how to work with git and github nowadays :) 34 | 35 | Work flow is pretty straightforward: 36 | 37 | 1. Clone the GitHub_ repo 38 | 39 | 2. Make a change 40 | 41 | 3. Make sure all tests passed 42 | 43 | 4. Commit changes to own asyncodbc clone 44 | 45 | 5. Make pull request from github page for your clone 46 | 47 | Preconditions for running asyncodbc test suite 48 | --------------------------------------------- 49 | 50 | We expect you to use a python virtual environment and docker_ to run 51 | our tests. 52 | 53 | There are several ways to make a virtual environment. 
54 | 55 | If you like to use *virtualenv* please run:: 56 | 57 | $ cd asyncodbc 58 | $ virtualenv --python=`which python3.5` venv 59 | 60 | For standard python *venv*:: 61 | 62 | $ cd asyncodbc 63 | $ python3.5 -m venv venv 64 | 65 | For *virtualenvwrapper*:: 66 | 67 | $ cd asyncodbc 68 | $ mkvirtualenv --python=`which python3.5` asyncodbc 69 | 70 | There are other tools like *pyvenv* but you know the rule of thumb 71 | now: create a python3.5 virtual environment and activate it. 72 | 73 | After that please install libraries required for development:: 74 | 75 | $ pip install -r requirements-dev.txt 76 | 77 | We also recommend to install *ipdb* but it's on your own:: 78 | 79 | $ pip install ipdb 80 | 81 | Congratulations, you are ready to run the test suite 82 | 83 | 84 | Install database 85 | ---------------- 86 | You do not need to install any databases, docker_ will pull images and create 87 | containers for you automatically, after the tests, containers will be removed. 88 | 89 | 90 | Run asyncodbc test suite 91 | ---------------------- 92 | 93 | After all the preconditions are met you can run tests typing the next 94 | command:: 95 | 96 | $ make test 97 | 98 | Or if you want to run only one particular test:: 99 | 100 | $ py.test tests/test_connection.py -k test_basic_cursor 101 | 102 | The command at first will run the static and style checkers (sorry, we don't 103 | accept pull requests with `pep8` or `pyflakes` errors). 104 | 105 | On `flake8` success the tests will be run. 106 | 107 | Please take a look on the produced output. 108 | 109 | Any extra texts (print statements and so on) should be removed. 110 | 111 | 112 | Tests coverage 113 | -------------- 114 | 115 | We are trying hard to have good test coverage; please don't make it worse. 116 | 117 | Use:: 118 | 119 | $ make cov 120 | 121 | to run test suite and collect coverage information. Once the command 122 | has finished check your coverage at the file that appears in the last 123 | line of the output: 124 | ``open file:///.../asyncodbc/htmlcov/index.html`` 125 | 126 | Please go to the link and make sure that your code change is covered. 127 | 128 | 129 | Documentation 130 | ------------- 131 | 132 | We encourage documentation improvements. 133 | 134 | Please before making a Pull Request about documentation changes run:: 135 | 136 | $ make doc 137 | 138 | Once it finishes it will output the index html page 139 | ``open file:///.../asyncodbc/docs/_build/html/index.html``. 140 | 141 | Go to the link and make sure your doc changes looks good. 142 | 143 | The End 144 | ------- 145 | 146 | After finishing all steps make a GitHub_ Pull Request, thanks. 147 | 148 | 149 | .. _unixODBC: http://www.unixodbc.org/ 150 | .. _GitHub: https://github.com/aio-libs/asyncodbc 151 | .. _docker: https://docs.docker.com/engine/installation/ 152 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2015-2019 Nikolay Novik 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | 203 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include CHANGES.txt 3 | include README.rst 4 | graft asyncodbc 5 | global-exclude *.pyc *.swp 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | checkfiles = asyncodbc/ examples/ tests/ conftest.py 2 | py_warn = PYTHONDEVMODE=1 3 | pytest_opts = -n auto --cov=asyncodbc --tb=native -q 4 | 5 | up: 6 | @poetry update 7 | 8 | deps: 9 | @poetry install 10 | 11 | check: deps build 12 | ifneq ($(shell which black),) 13 | black --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false) 14 | endif 15 | pflake8 $(checkfiles) 16 | #mypy $(checkfiles) 17 | #pylint -d C,W,R $(checkfiles) 18 | #bandit -r $(checkfiles) 19 | twine check dist/* 20 | 21 | 22 | test_mssql: deps 23 | $(py_warn) TEST_DSN="DRIVER=ODBC Driver 18 for SQL Server;SERVER=127.0.0.1,1433;UID=sa;PWD=$(TEST_MSSQL_PASS);TrustServerCertificate=YES;MARS_Connection=YES" pytest $(pytest_opts) 24 | 25 | _testall: test_mssql 26 | 27 | testall: deps _testall 28 | coverage report 29 | 30 | ci: check testall 31 | 32 | docs: deps 33 | rm -fR ./build 34 | sphinx-build -M html docs build 35 | 36 | style: deps 37 | isort -src $(checkfiles) 38 | black $(checkfiles) 39 | 40 | build: deps 41 | rm -fR dist/ 42 | poetry build 43 | 44 | publish: deps build 45 | twine upload dist/* 46 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | asyncodbc 2 | ========= 3 | .. image:: https://travis-ci.com/tortoise/asyncodbc.svg?branch=master 4 | :target: https://travis-ci.com/tortoise/asyncodbc 5 | .. image:: https://coveralls.io/repos/tortoise/asyncodbc/badge.svg?branch=master&service=github 6 | :target: https://coveralls.io/github/tortoise/asyncodbc?branch=master 7 | .. image:: https://img.shields.io/pypi/v/asyncodbc.svg 8 | :target: https://pypi.python.org/pypi/asyncodbc 9 | 10 | **asyncodbc** is a Python 3.5+ module that makes it possible to access ODBC_ databases 11 | with asyncio_. It relies on the awesome pyodbc_ library and preserves the same look and 12 | feel. *asyncodbc* was written using `async/await` syntax (PEP492_) and thus is not compatible 13 | with Python versions older than 3.5. Internally *asyncodbc* employs threads to avoid 14 | blocking the event loop, threads_ are not that as bad as you think!. Other 15 | drivers like motor_ use the same approach. 16 | 17 | **asyncodbc** is fully compatible and tested with uvloop_. Take a look at the test 18 | suite, all tests are executed with both the default event loop and uvloop_. 19 | 20 | Supported Databases 21 | ------------------- 22 | 23 | **asyncodbc** should work with all databases supported by pyodbc_. 
But for now the 24 | library has been tested with: **SQLite**, **MySQL** and **PostgreSQL**. Feel 25 | free to add other databases to the test suite by submitting a PR. 26 | 27 | Basic Example 28 | ------------- 29 | 30 | **asyncodbc** is based on pyodbc_ and provides the same api, you just need 31 | to use ``yield from conn.f()`` or ``await conn.f()`` instead of ``conn.f()`` 32 | 33 | Properties are unchanged, so ``conn.prop`` is correct as well as 34 | ``conn.prop = val``. 35 | 36 | 37 | .. code:: python 38 | 39 | import asyncio 40 | import asyncodbc 41 | 42 | 43 | loop = asyncio.get_event_loop() 44 | 45 | 46 | async def test_example(): 47 | dsn = 'Driver=SQLite;Database=sqlite.db' 48 | conn = await asyncodbc.connect(dsn=dsn, loop=loop) 49 | 50 | cur = await conn.cursor() 51 | await cur.execute("SELECT 42 AS age;") 52 | rows = await cur.fetchall() 53 | print(rows) 54 | print(rows[0]) 55 | print(rows[0].age) 56 | await cur.close() 57 | await conn.close() 58 | 59 | loop.run_until_complete(test_example()) 60 | 61 | 62 | Connection Pool 63 | --------------- 64 | Connection pooling is ported from aiopg_ and relies on PEP492_ features: 65 | 66 | .. code:: python 67 | 68 | import asyncio 69 | import asyncodbc 70 | 71 | 72 | loop = asyncio.get_event_loop() 73 | 74 | 75 | async def test_pool(): 76 | dsn = 'Driver=SQLite;Database=sqlite.db' 77 | pool = await asyncodbc.create_pool(dsn=dsn, loop=loop) 78 | 79 | async with pool.acquire() as conn: 80 | cur = await conn.cursor() 81 | await cur.execute("SELECT 42;") 82 | r = await cur.fetchall() 83 | print(r) 84 | await cur.close() 85 | await conn.close() 86 | pool.close() 87 | await pool.wait_closed() 88 | 89 | loop.run_until_complete(test_pool()) 90 | 91 | 92 | Context Managers 93 | ---------------- 94 | `Pool`, `Connection` and `Cursor` objects support the context management 95 | protocol: 96 | 97 | .. code:: python 98 | 99 | import asyncio 100 | import asyncodbc 101 | 102 | 103 | loop = asyncio.get_event_loop() 104 | 105 | 106 | async def test_example(): 107 | dsn = 'Driver=SQLite;Database=sqlite.db' 108 | 109 | async with asyncodbc.create_pool(dsn=dsn, loop=loop) as pool: 110 | async with pool.acquire() as conn: 111 | async with conn.cursor() as cur: 112 | await cur.execute('SELECT 42 AS age;') 113 | val = await cur.fetchone() 114 | print(val) 115 | print(val.age) 116 | 117 | loop.run_until_complete(test_example()) 118 | 119 | 120 | Installation 121 | ------------ 122 | 123 | In a linux environment pyodbc_ (hence *asyncodbc*) requires the unixODBC_ library. 124 | You can install it using your package manager, for example:: 125 | 126 | $ sudo apt-get install unixodbc 127 | $ sudo apt-get install unixodbc-dev 128 | 129 | then:: 130 | 131 | pip install asyncodbc 132 | 133 | 134 | Run tests 135 | --------- 136 | 137 | For testing purposes you need to install docker_ and the development 138 | requirements:: 139 | 140 | $ pip install -r requirements-dev.txt 141 | 142 | In order to simplify development you should install the provided docker container. 143 | This way you don't need to install any databases or other system libraries, everything happens inside the container. 144 | 145 | Then just execute:: 146 | 147 | $ make docker_build 148 | $ make docker_test 149 | 150 | The test will automatically pull images and build containers with 151 | the required databases. 152 | 153 | *NOTE:* Running tests requires Python 3.6 or higher. 
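
The test suite reads the ODBC connection string from the ``TEST_DSN``
environment variable (see ``conftest.py`` and the ``test_mssql`` target in the
``Makefile``). A minimal sketch, assuming a local SQL Server instance using the
same throw-away password as CI, is to invoke ``pytest`` directly::

    $ export TEST_MSSQL_PASS=Abcd12345678
    $ TEST_DSN="DRIVER=ODBC Driver 18 for SQL Server;SERVER=127.0.0.1,1433;UID=sa;PWD=$TEST_MSSQL_PASS;TrustServerCertificate=YES;MARS_Connection=YES" pytest

Note that ``conftest.py`` creates (and later drops) a scratch ``test_<uuid>``
database for the session, so the configured user needs permission to create
databases.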
154 | 155 | 156 | Other SQL Drivers 157 | ----------------- 158 | 159 | * aiopg_ - asyncio client for PostgreSQL 160 | * aiomysql_ - asyncio client form MySQL 161 | 162 | 163 | Requirements 164 | ------------ 165 | 166 | * Python_ 3.5+ 167 | * pyodbc_ 168 | * uvloop_ (optional) 169 | 170 | 171 | .. _Python: https://www.python.org 172 | .. _asyncio: http://docs.python.org/3.4/library/asyncio.html 173 | .. _pyodbc: https://github.com/mkleehammer/pyodbc 174 | .. _uvloop: https://github.com/MagicStack/uvloop 175 | .. _ODBC: https://en.wikipedia.org/wiki/Open_Database_Connectivity 176 | .. _aiopg: https://github.com/tortoise/aiopg 177 | .. _aiomysql: https://github.com/tortoise/aiomysql 178 | .. _PEP492: https://www.python.org/dev/peps/pep-0492/ 179 | .. _unixODBC: http://www.unixodbc.org/ 180 | .. _threads: http://techspot.zzzeek.org/2015/02/15/asynchronous-python-and-databases/ 181 | .. _docker: https://docs.docker.com/engine/installation/ 182 | .. _motor: https://emptysqua.re/blog/motor-0-7-beta/ 183 | -------------------------------------------------------------------------------- /asyncodbc/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from pyodbc import dataSources as _dataSources 4 | 5 | from .connection import Connection, connect 6 | from .pool import Pool, create_pool 7 | 8 | __version__ = "0.1.1" 9 | __all__ = ["connect", "Connection", "create_pool", "Pool", "data_sources"] 10 | 11 | 12 | async def data_sources(executor=None): 13 | """Returns a dictionary mapping available DSNs to their descriptions. 14 | 15 | :param executor: instance of custom ThreadPoolExecutor, if not supplied 16 | default executor will be used 17 | :return dict: mapping of dsn to driver description 18 | """ 19 | loop = asyncio.get_event_loop() 20 | sources = await loop.run_in_executor(executor, _dataSources) 21 | return sources 22 | -------------------------------------------------------------------------------- /asyncodbc/connection.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | import traceback 4 | import warnings 5 | from functools import partial 6 | 7 | import pyodbc 8 | 9 | from .cursor import Cursor 10 | from .utils import _ConnectionContextManager, _ContextManager, _is_conn_close_error 11 | 12 | __all__ = ["connect", "Connection"] 13 | 14 | 15 | def connect( 16 | *, 17 | dsn, 18 | autocommit=False, 19 | ansi=False, 20 | timeout=0, 21 | executor=None, 22 | echo=False, 23 | after_created=None, 24 | **kwargs 25 | ): 26 | """Accepts an ODBC connection string and returns a new Connection object. 27 | 28 | The connection string can be passed as the string `str`, as a list of 29 | keywords,or a combination of the two. Any keywords except autocommit, 30 | ansi, and timeout are simply added to the connection string. 31 | 32 | param autocommit bool: False or zero, the default, if True or non-zero, 33 | the connection is put into ODBC autocommit mode and statements are 34 | committed automatically. 35 | param ansi bool: By default, pyodbc first attempts to connect using 36 | the Unicode version of SQLDriverConnectW. If the driver returns IM001 37 | indicating it does not support the Unicode version, the ANSI version 38 | is tried. 39 | param timeout int: An integer login timeout in seconds, used to set 40 | the SQL_ATTR_LOGIN_TIMEOUT attribute of the connection. 
The default is 41 | 0 which means the database's default timeout, if any, is use 42 | param after_created callable: support customize configuration after 43 | connection is connected. Must be an async unary function, or leave it 44 | as None. 45 | param ansi bool: If True, use the ANSI version of SQLDriverConnectW. 46 | """ 47 | return _ConnectionContextManager( 48 | _connect( 49 | dsn=dsn, 50 | autocommit=autocommit, 51 | ansi=ansi, 52 | timeout=timeout, 53 | executor=executor, 54 | echo=echo, 55 | after_created=after_created, 56 | **kwargs 57 | ) 58 | ) 59 | 60 | 61 | async def _connect( 62 | *, 63 | dsn, 64 | autocommit=False, 65 | ansi=False, 66 | timeout=0, 67 | executor=None, 68 | echo=False, 69 | after_created=None, 70 | **kwargs 71 | ): 72 | conn = Connection( 73 | dsn=dsn, 74 | autocommit=autocommit, 75 | ansi=ansi, 76 | timeout=timeout, 77 | echo=echo, 78 | executor=executor, 79 | after_created=after_created, 80 | **kwargs 81 | ) 82 | await conn._connect() 83 | return conn 84 | 85 | 86 | class Connection: 87 | """Connection objects manage connections to the database. 88 | 89 | Connections should only be created by the asyncodbc.connect function. 90 | """ 91 | 92 | _source_traceback = None 93 | 94 | def __init__( 95 | self, 96 | *, 97 | dsn, 98 | autocommit=False, 99 | ansi=None, 100 | timeout=0, 101 | executor=None, 102 | echo=False, 103 | after_created=None, 104 | **kwargs 105 | ): 106 | self._executor = executor 107 | self._loop = asyncio.get_event_loop() 108 | self._conn = None 109 | self._expired = False 110 | self._timeout = timeout 111 | self._last_usage = self._loop.time() 112 | self._autocommit = autocommit 113 | self._ansi = ansi 114 | self._dsn = dsn 115 | self._echo = echo 116 | self._posthook = after_created 117 | self._kwargs = kwargs 118 | self._connected = False 119 | if self.loop.get_debug(): 120 | self._source_traceback = traceback.extract_stack(sys._getframe(1)) 121 | 122 | def _execute(self, func, *args, **kwargs): 123 | # execute function with args and kwargs in thread pool 124 | func = partial(func, *args, **kwargs) 125 | future = asyncio.get_event_loop().run_in_executor(self._executor, func) 126 | return future 127 | 128 | async def _connect(self): 129 | # create pyodbc connection 130 | f = self._execute( 131 | pyodbc.connect, 132 | self._dsn, 133 | autocommit=self._autocommit, 134 | ansi=self._ansi, 135 | timeout=self._timeout, 136 | **self._kwargs 137 | ) 138 | self._conn = await f 139 | self._connected = True 140 | if self._posthook is not None: 141 | await self._posthook(self._conn) 142 | 143 | @property 144 | def connected(self): 145 | return self._connected 146 | 147 | @property 148 | def expired(self): 149 | return self._expired 150 | 151 | @property 152 | def loop(self): 153 | return self._loop 154 | 155 | @property 156 | def closed(self): 157 | if self._conn: 158 | return False 159 | return True 160 | 161 | @property 162 | def autocommit(self): 163 | """Show autocommit mode for current database session. True if the 164 | connection is in autocommit mode; False otherwise. 
The default 165 | is False 166 | """ 167 | return self._conn.autocommit 168 | 169 | @property 170 | def timeout(self): 171 | return self._conn.timeout 172 | 173 | @property 174 | def last_usage(self): 175 | return self._last_usage 176 | 177 | @property 178 | def echo(self): 179 | return self._echo 180 | 181 | async def _cursor(self): 182 | c = await self._execute(self._conn.cursor) 183 | self._last_usage = self._loop.time() 184 | return Cursor(c, self, echo=self._echo) 185 | 186 | def cursor(self): 187 | return _ContextManager(self._cursor()) 188 | 189 | async def close(self): 190 | """Close pyodbc connection""" 191 | if not self._conn: 192 | return 193 | c = await self._execute(self._conn.close) 194 | self._conn = None 195 | return c 196 | 197 | def commit(self): 198 | """Commit any pending transaction to the database.""" 199 | fut = self._execute(self._conn.commit) 200 | return fut 201 | 202 | def rollback(self): 203 | """Causes the database to roll back to the start of any pending 204 | transaction. 205 | """ 206 | fut = self._execute(self._conn.rollback) 207 | return fut 208 | 209 | async def execute(self, sql, *args): 210 | """Create a new Cursor object, call its execute method, and return it. 211 | 212 | See Cursor.execute for more details.This is a convenience method 213 | that is not part of the DB API. Since a new Cursor is allocated 214 | by each call, this should not be used if more than one SQL 215 | statement needs to be executed. 216 | 217 | :raises pyodbc.Error: When an error is encountered during execution 218 | """ 219 | try: 220 | _cursor = await self._execute(self._conn.execute, sql, *args) 221 | connection = self 222 | cursor = Cursor(_cursor, connection, echo=self._echo) 223 | return cursor 224 | except pyodbc.Error as e: 225 | if _is_conn_close_error(e): 226 | await self.close() 227 | raise 228 | 229 | def getinfo(self, type_): 230 | """Returns general information about the driver and data source 231 | associated with a connection by calling SQLGetInfo and returning its 232 | results. See Microsoft's SQLGetInfo documentation for the types of 233 | information available. 234 | 235 | :param type_: int, pyodbc.SQL_* constant 236 | """ 237 | fut = self._execute(self._conn.getinfo, type_) 238 | return fut 239 | 240 | def add_output_converter(self, sqltype, func): 241 | """Register an output converter function that will be called whenever 242 | a value with the given SQL type is read from the database. 243 | 244 | :param sqltype: the integer SQL type value to convert, which can 245 | be one of the defined standard constants (pyodbc.SQL_VARCHAR) 246 | or a database-specific value (e.g. -151 for the SQL Server 2008 247 | geometry data type). 248 | :param func: the converter function which will be called with a 249 | single parameter, the value, and should return the converted 250 | value. If the value is NULL, the parameter will be None. 251 | Otherwise it will be a Python string. 252 | """ 253 | fut = self._execute(self._conn.add_output_converter, sqltype, func) 254 | return fut 255 | 256 | def clear_output_converters(self): 257 | """Remove all output converter functions added by 258 | add_output_converter. 259 | """ 260 | fut = self._execute(self._conn.clear_output_converters) 261 | return fut 262 | 263 | def set_attr(self, attr_id, value): 264 | """Calls SQLSetConnectAttr with the given values. 265 | 266 | param attr_id: the attribute ID (integer) to set. These are ODBC or 267 | driver constants. 268 | param value: the connection attribute value to set. 
At this time 269 | only integer values are supported. 270 | """ 271 | fut = self._execute(self._conn.set_attr, attr_id, value) 272 | return fut 273 | 274 | def __del__(self): 275 | if not self.closed: 276 | # This will block the loop, please use close 277 | # coroutine to close connection 278 | self._conn.close() 279 | self._conn = None 280 | 281 | warnings.warn("Unclosed connection {!r}".format(self), ResourceWarning) 282 | 283 | context = {"connection": self, "message": "Unclosed connection"} 284 | if self._source_traceback is not None: 285 | context["source_traceback"] = self._source_traceback 286 | self._loop.call_exception_handler(context) 287 | 288 | async def __aenter__(self): 289 | return self 290 | 291 | async def __aexit__(self, exc_type, exc_val, exc_tb): 292 | await self.close() 293 | return 294 | -------------------------------------------------------------------------------- /asyncodbc/cursor.py: -------------------------------------------------------------------------------- 1 | import pyodbc 2 | 3 | from .log import logger 4 | from .utils import PY_352, _is_conn_close_error 5 | 6 | __all__ = ["Cursor"] 7 | 8 | 9 | class Cursor: 10 | """Cursors represent a database cursor (and map to ODBC HSTMTs), which 11 | is used to manage the context of a fetch operation. 12 | 13 | Cursors created from the same connection are not isolated, i.e., any 14 | changes made to the database by a cursor are immediately visible by 15 | the other cursors. 16 | """ 17 | 18 | def __init__(self, pyodbc_cursor, connection, echo=False): 19 | self._conn = connection 20 | self._impl = pyodbc_cursor 21 | self._loop = connection.loop 22 | self._echo = echo 23 | 24 | async def _run_operation(self, func, *args, **kwargs): 25 | # execute func in thread pool of attached to cursor connection 26 | if not self._conn: 27 | raise pyodbc.OperationalError("Cursor is closed.") 28 | 29 | try: 30 | result = await self._conn._execute(func, *args, **kwargs) 31 | return result 32 | except pyodbc.Error as e: 33 | if self._conn and _is_conn_close_error(e): 34 | await self._conn.close() 35 | raise 36 | 37 | @property 38 | def echo(self): 39 | """Return echo mode status.""" 40 | return self._echo 41 | 42 | @property 43 | def connection(self): 44 | """Cursors database connection""" 45 | return self._conn 46 | 47 | @property 48 | def autocommit(self): 49 | """Show autocommit mode for current database session. True if 50 | connection is in autocommit mode; False otherwse. The default 51 | is False. 52 | """ 53 | return self._conn.autocommit 54 | 55 | @property 56 | def rowcount(self): 57 | """The number of rows modified by the previous DDL statement. 58 | 59 | This is -1 if no SQL has been executed or if the number of rows is 60 | unknown. Note that it is not uncommon for databases to report -1 61 | after a select statement for performance reasons. (The exact number 62 | may not be known before the first records are returned to the 63 | application.) 64 | """ 65 | return self._impl.rowcount 66 | 67 | @property 68 | def description(self): 69 | """This read-only attribute is a list of 7-item tuples, each 70 | containing (name, type_code, display_size, internal_size, precision, 71 | scale, null_ok). 72 | 73 | pyodbc only provides values for name, type_code, internal_size, 74 | and null_ok. The other values are set to None. 75 | 76 | This attribute will be None for operations that do not return rows 77 | or if one of the execute methods has not been called. 
78 | 79 | The type_code member is the class type used to create the Python 80 | objects when reading rows. For example, a varchar column's type will 81 | be str. 82 | """ 83 | return self._impl.description 84 | 85 | @property 86 | def closed(self): 87 | """Read only property indicates if cursor has been closed""" 88 | return self._conn is None 89 | 90 | @property 91 | def arraysize(self): 92 | """This read/write attribute specifies the number of rows to fetch 93 | at a time with .fetchmany() . It defaults to 1 meaning to fetch a 94 | single row at a time. 95 | """ 96 | return self._impl.arraysize 97 | 98 | @arraysize.setter 99 | def arraysize(self, size): 100 | self._impl.arraysize = size 101 | 102 | async def close(self): 103 | """Close the cursor now (rather than whenever __del__ is called). 104 | 105 | The cursor will be unusable from this point forward; an Error 106 | (or subclass) exception will be raised if any operation is attempted 107 | with the cursor. 108 | """ 109 | if self._conn is None: 110 | return 111 | await self._run_operation(self._impl.close) 112 | self._conn = None 113 | 114 | async def execute(self, sql, *params): 115 | """Executes the given operation substituting any markers with 116 | the given parameters. 117 | 118 | :param sql: the SQL statement to execute with optional ? parameter 119 | markers. Note that pyodbc never modifies the SQL statement. 120 | :param params: optional parameters for the markers in the SQL. They 121 | can be passed in a single sequence as defined by the DB API. 122 | For convenience, however, they can also be passed individually 123 | """ 124 | if self._echo: 125 | logger.info(sql) 126 | logger.info("%r", sql) 127 | 128 | await self._run_operation(self._impl.execute, sql, *params) 129 | return self 130 | 131 | def executemany(self, sql, *params): 132 | """Prepare a database query or command and then execute it against 133 | all parameter sequences found in the sequence seq_of_params. 134 | 135 | :param sql: the SQL statement to execute with optional ? parameters 136 | :param params: sequence parameters for the markers in the SQL. 137 | """ 138 | fut = self._run_operation(self._impl.executemany, sql, *params) 139 | return fut 140 | 141 | def callproc(self, procname, args=()): 142 | raise NotImplementedError 143 | 144 | async def setinputsizes(self, *args, **kwargs): 145 | """Does nothing, required by DB API.""" 146 | return None 147 | 148 | async def setoutputsize(self, *args, **kwargs): 149 | """Does nothing, required by DB API.""" 150 | return None 151 | 152 | def fetchone(self): 153 | """Returns the next row or None when no more data is available. 154 | 155 | A ProgrammingError exception is raised if no SQL has been executed 156 | or if it did not return a result set (e.g. was not a SELECT 157 | statement). 158 | """ 159 | fut = self._run_operation(self._impl.fetchone) 160 | return fut 161 | 162 | def fetchall(self): 163 | """Returns a list of all remaining rows. 164 | 165 | Since this reads all rows into memory, it should not be used if 166 | there are a lot of rows. Consider iterating over the rows instead. 167 | However, it is useful for freeing up a Cursor so you can perform a 168 | second query before processing the resulting rows. 169 | 170 | A ProgrammingError exception is raised if no SQL has been executed 171 | or if it did not return a result set (e.g. 
was not a SELECT statement) 172 | """ 173 | fut = self._run_operation(self._impl.fetchall) 174 | return fut 175 | 176 | def fetchmany(self, size): 177 | """Returns a list of remaining rows, containing no more than size 178 | rows, used to process results in chunks. The list will be empty when 179 | there are no more rows. 180 | 181 | The default for cursor.arraysize is 1 which is no different than 182 | calling fetchone(). 183 | 184 | A ProgrammingError exception is raised if no SQL has been executed 185 | or if it did not return a result set (e.g. was not a SELECT 186 | statement). 187 | 188 | :param size: int, max number of rows to return 189 | """ 190 | fut = self._run_operation(self._impl.fetchmany, size) 191 | return fut 192 | 193 | def nextset(self): 194 | """This method will make the cursor skip to the next available 195 | set, discarding any remaining rows from the current set. 196 | 197 | If there are no more sets, the method returns None. Otherwise, 198 | it returns a true value and subsequent calls to the fetch methods 199 | will return rows from the next result set. 200 | 201 | This method is primarily used if you have stored procedures that 202 | return multiple results. 203 | """ 204 | fut = self._run_operation(self._impl.nextset) 205 | return fut 206 | 207 | def tables(self, **kw): 208 | """Creates a result set of tables in the database that match the 209 | given criteria. 210 | """ 211 | fut = self._run_operation(self._impl.tables, **kw) 212 | return fut 213 | 214 | def columns(self, **kw): 215 | """Creates a results set of column names in specified tables by 216 | executing the ODBC SQLColumns function. Each row fetched has the 217 | following columns. 218 | """ 219 | fut = self._run_operation(self._impl.columns, **kw) 220 | return fut 221 | 222 | def statistics(self, catalog=None, schema=None, unique=False, quick=True): 223 | """Creates a results set of statistics about a single table and 224 | the indexes associated with the table by executing SQLStatistics. 225 | 226 | :param catalog: the catalog name 227 | :param schema: the schmea name 228 | :param unique: if True, only unique indexes are retured. Otherwise 229 | all indexes are returned. 230 | :param quick: if True, CARDINALITY and PAGES are returned only if 231 | they are readily available from the server 232 | """ 233 | fut = self._run_operation( 234 | self._impl.statistics, 235 | catalog=catalog, 236 | schema=schema, 237 | unique=unique, 238 | quick=quick, 239 | ) 240 | return fut 241 | 242 | def rowIdColumns(self, table, catalog=None, schema=None, nullable=True): # nopep8 243 | """Executes SQLSpecialColumns with SQL_BEST_ROWID which creates a 244 | result set of columns that uniquely identify a row 245 | """ 246 | fut = self._run_operation( 247 | self._impl.rowIdColumns, 248 | table, 249 | catalog=catalog, 250 | schema=schema, 251 | nullable=nullable, 252 | ) 253 | return fut 254 | 255 | def rowVerColumns(self, table, catalog=None, schema=None, nullable=True): # nopep8 256 | """Executes SQLSpecialColumns with SQL_ROWVER which creates a 257 | result set of columns that are automatically updated when any 258 | value in the row is updated. 
259 | """ 260 | fut = self._run_operation( 261 | self._impl.rowVerColumns, 262 | table, 263 | catalog=catalog, 264 | schema=schema, 265 | nullable=nullable, 266 | ) 267 | return fut 268 | 269 | def primaryKeys(self, table, catalog=None, schema=None): # nopep8 270 | """Creates a result set of column names that make up the primary key 271 | for a table by executing the SQLPrimaryKeys function.""" 272 | fut = self._run_operation( 273 | self._impl.primaryKeys, table, catalog=catalog, schema=schema 274 | ) 275 | return fut 276 | 277 | def foreignKeys(self, *a, **kw): # nopep8 278 | """Executes the SQLForeignKeys function and creates a result set 279 | of column names that are foreign keys in the specified table (columns 280 | in the specified table that refer to primary keys in other tables) 281 | or foreign keys in other tables that refer to the primary key in 282 | the specified table. 283 | """ 284 | fut = self._run_operation(self._impl.foreignKeys, *a, **kw) 285 | return fut 286 | 287 | def getTypeInfo(self, sql_type): # nopep8 288 | """Executes SQLGetTypeInfo a creates a result set with information 289 | about the specified data type or all data types supported by the 290 | ODBC driver if not specified. 291 | """ 292 | fut = self._run_operation(self._impl.getTypeInfo, sql_type) 293 | return fut 294 | 295 | def procedures(self, *a, **kw): 296 | """Executes SQLProcedures and creates a result set of information 297 | about the procedures in the data source. 298 | """ 299 | fut = self._run_operation(self._impl.procedures, *a, **kw) 300 | return fut 301 | 302 | def procedureColumns(self, *a, **kw): # nopep8 303 | fut = self._run_operation(self._impl.procedureColumns, *a, **kw) 304 | return fut 305 | 306 | def skip(self, count): 307 | fut = self._run_operation(self._impl.skip, count) 308 | return fut 309 | 310 | def commit(self): 311 | fut = self._run_operation(self._impl.commit) 312 | return fut 313 | 314 | def rollback(self): 315 | fut = self._run_operation(self._impl.rollback) 316 | return fut 317 | 318 | if PY_352: 319 | 320 | def __aiter__(self): 321 | return self 322 | 323 | else: 324 | 325 | async def __aiter__(self): 326 | return self 327 | 328 | async def __anext__(self): 329 | ret = await self.fetchone() 330 | if ret is not None: 331 | return ret 332 | else: 333 | raise StopAsyncIteration 334 | 335 | async def __aenter__(self): 336 | return self 337 | 338 | async def __aexit__(self, exc_type, exc_val, exc_tb): 339 | await self.close() 340 | -------------------------------------------------------------------------------- /asyncodbc/log.py: -------------------------------------------------------------------------------- 1 | """Logging configuration.""" 2 | 3 | import logging 4 | 5 | # Name the logger after the package. 
6 | logger = logging.getLogger(__package__) 7 | -------------------------------------------------------------------------------- /asyncodbc/pool.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import collections 3 | from typing import Deque, Set 4 | 5 | from .connection import Connection, connect 6 | from .utils import _PoolAcquireContextManager, _PoolContextManager 7 | 8 | __all__ = ["create_pool", "Pool"] 9 | 10 | 11 | def create_pool(minsize=1, maxsize=10, echo=False, pool_recycle=-1, **kwargs): 12 | return _PoolContextManager( 13 | _create_pool( 14 | minsize=minsize, 15 | maxsize=maxsize, 16 | echo=echo, 17 | pool_recycle=pool_recycle, 18 | **kwargs 19 | ) 20 | ) 21 | 22 | 23 | async def _create_pool(minsize=1, maxsize=10, echo=False, pool_recycle=-1, **kwargs): 24 | pool = Pool( 25 | minsize=minsize, maxsize=maxsize, echo=echo, pool_recycle=pool_recycle, **kwargs 26 | ) 27 | if minsize > 0: 28 | async with pool.cond: 29 | await pool.fill_free_pool(False) 30 | return pool 31 | 32 | 33 | class Pool(asyncio.AbstractServer): 34 | """Connection pool, just from aiomysql""" 35 | 36 | def __init__( 37 | self, 38 | minsize: int, 39 | maxsize: int, 40 | pool_recycle: int, 41 | echo: bool = False, 42 | **kwargs 43 | ): 44 | if minsize < 0: 45 | raise ValueError("minsize should be zero or greater") 46 | if maxsize < minsize: 47 | raise ValueError("maxsize should be not less than minsize") 48 | self._minsize = minsize 49 | self._loop = asyncio.get_event_loop() 50 | self._conn_kwargs = kwargs 51 | self._acquiring = 0 52 | self._free: Deque[Connection] = collections.deque(maxlen=maxsize) 53 | self._cond = asyncio.Condition() 54 | self._used: Set[Connection] = set() 55 | self._terminated: Set[Connection] = set() 56 | self._closing = False 57 | self._closed = False 58 | self._echo = echo 59 | self._recycle = pool_recycle 60 | 61 | @property 62 | def echo(self): 63 | return self._echo 64 | 65 | @property 66 | def cond(self): 67 | return self._cond 68 | 69 | @property 70 | def minsize(self): 71 | return self._minsize 72 | 73 | @property 74 | def maxsize(self): 75 | return self._free.maxlen 76 | 77 | @property 78 | def size(self): 79 | return self.freesize + len(self._used) + self._acquiring 80 | 81 | @property 82 | def freesize(self): 83 | return len(self._free) 84 | 85 | @property 86 | def closed(self): 87 | return self._closed 88 | 89 | async def clear(self): 90 | """Close all free connections in pool.""" 91 | async with self._cond: 92 | while self._free: 93 | conn = self._free.popleft() 94 | await conn.close() 95 | self._cond.notify() 96 | 97 | def close(self): 98 | """Close pool. 99 | 100 | Mark all pool connections to be closed on getting back to pool. 101 | Closed pool doesn't allow to acquire new connections. 102 | """ 103 | if self._closed: 104 | return 105 | self._closing = True 106 | 107 | def terminate(self): 108 | """Terminate pool. 109 | 110 | Close pool with instantly closing all acquired connections also. 111 | """ 112 | 113 | self.close() 114 | 115 | for conn in list(self._used): 116 | conn.close() 117 | self._terminated.add(conn) 118 | 119 | self._used.clear() 120 | 121 | async def wait_closed(self): 122 | """ 123 | Wait for closing all pool's connections. 
124 | 125 | :raises RuntimeError: if pool is not closing 126 | """ 127 | 128 | if self._closed: 129 | return 130 | if not self._closing: 131 | raise RuntimeError(".wait_closed() should be called " "after .close()") 132 | 133 | while self._free: 134 | conn = self._free.popleft() 135 | await conn.close() 136 | 137 | async with self._cond: 138 | while self.size > self.freesize: 139 | await self._cond.wait() 140 | 141 | self._closed = True 142 | 143 | def acquire(self): 144 | """Acquire free connection from the pool.""" 145 | coro = self._acquire() 146 | return _PoolAcquireContextManager(coro, self) 147 | 148 | async def _acquire(self): 149 | if self._closing: 150 | raise RuntimeError("Cannot acquire connection after closing pool") 151 | async with self._cond: 152 | while True: 153 | await self.fill_free_pool(True) 154 | if self._free: 155 | conn = self._free.popleft() 156 | self._used.add(conn) 157 | return conn 158 | else: 159 | await self._cond.wait() 160 | 161 | async def fill_free_pool(self, override_min: bool = False): 162 | # iterate over free connections and remove timeouted ones 163 | free_size = len(self._free) 164 | n = 0 165 | while n < free_size: 166 | conn = self._free[-1] 167 | if conn.expired or ( 168 | self._recycle > -1 169 | and self._loop.time() - conn.last_usage > self._recycle 170 | ): 171 | self._free.pop() 172 | await conn.close() 173 | else: 174 | self._free.rotate() 175 | n += 1 176 | 177 | while self.size < self.minsize: 178 | self._acquiring += 1 179 | try: 180 | conn = await connect(echo=self._echo, **self._conn_kwargs) 181 | # raise exception if pool is closing 182 | self._free.append(conn) 183 | self._cond.notify() 184 | finally: 185 | self._acquiring -= 1 186 | if self._free: 187 | return 188 | 189 | if override_min and self.size < self.maxsize: 190 | self._acquiring += 1 191 | try: 192 | conn = await connect(echo=self._echo, **self._conn_kwargs) 193 | # raise exception if pool is closing 194 | self._free.append(conn) 195 | self._cond.notify() 196 | finally: 197 | self._acquiring -= 1 198 | 199 | async def _wakeup(self): 200 | async with self._cond: 201 | self._cond.notify() 202 | 203 | async def release(self, conn): 204 | if conn in self._terminated: 205 | self._terminated.remove(conn) 206 | return 207 | self._used.remove(conn) 208 | if conn.connected: 209 | if self._closing: 210 | await conn.close() 211 | else: 212 | self._free.append(conn) 213 | await self._wakeup() 214 | 215 | async def __aenter__(self): 216 | return self 217 | 218 | async def __aexit__(self, exc_type, exc_val, exc_tb): 219 | self.close() 220 | await self.wait_closed() 221 | -------------------------------------------------------------------------------- /asyncodbc/utils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from collections.abc import Coroutine 3 | 4 | from pyodbc import Error 5 | 6 | PY_352 = sys.version_info >= (3, 5, 2) 7 | 8 | # Issue #195. 
Don't pollute the pool with bad conns 9 | # Unfortunately occasionally sqlite will return 'HY000' for invalid query, 10 | # so we need specialize the check 11 | _CONN_CLOSE_ERRORS = { 12 | # [Microsoft][ODBC Driver 17 for SQL Server]Communication link failure 13 | "08S01": None, 14 | # [HY000] server closed the connection unexpectedly 15 | "HY000": "[HY000] server closed the connection unexpectedly", 16 | } 17 | 18 | 19 | def _is_conn_close_error(e): 20 | if not isinstance(e, Error) or len(e.args) < 2: 21 | return False 22 | 23 | sqlstate, msg = e.args[0], e.args[1] 24 | if sqlstate not in _CONN_CLOSE_ERRORS: 25 | return False 26 | 27 | check_msg = _CONN_CLOSE_ERRORS[sqlstate] 28 | if not check_msg: 29 | return True 30 | 31 | return msg.startswith(check_msg) 32 | 33 | 34 | class _ContextManager(Coroutine): 35 | __slots__ = ("_coro", "_obj") 36 | 37 | def __init__(self, coro): 38 | self._coro = coro 39 | self._obj = None 40 | 41 | def send(self, value): 42 | return self._coro.send(value) 43 | 44 | def throw(self, typ, val=None, tb=None): 45 | if val is None: 46 | return self._coro.throw(typ) 47 | elif tb is None: 48 | return self._coro.throw(typ, val) 49 | else: 50 | return self._coro.throw(typ, val, tb) 51 | 52 | def close(self): 53 | return self._coro.close() 54 | 55 | @property 56 | def gi_frame(self): 57 | return self._coro.gi_frame 58 | 59 | @property 60 | def gi_running(self): 61 | return self._coro.gi_running 62 | 63 | @property 64 | def gi_code(self): 65 | return self._coro.gi_code 66 | 67 | def __next__(self): 68 | return self.send(None) 69 | 70 | def __iter__(self): 71 | return self._coro.__await__() 72 | 73 | def __await__(self): 74 | return self._coro.__await__() 75 | 76 | async def __aenter__(self): 77 | self._obj = await self._coro 78 | return self._obj 79 | 80 | async def __aexit__(self, exc_type, exc, tb): 81 | await self._obj.close() 82 | self._obj = None 83 | 84 | 85 | class _PoolContextManager(_ContextManager): 86 | async def __aexit__(self, exc_type, exc, tb): 87 | self._obj.close() 88 | await self._obj.wait_closed() 89 | self._obj = None 90 | 91 | 92 | class _PoolAcquireContextManager(_ContextManager): 93 | __slots__ = ("_coro", "_conn", "_pool") 94 | 95 | def __init__(self, coro, pool): 96 | super().__init__(coro) 97 | self._coro = coro 98 | self._conn = None 99 | self._pool = pool 100 | 101 | async def __aenter__(self): 102 | self._conn = await self._coro 103 | return self._conn 104 | 105 | async def __aexit__(self, exc_type, exc, tb): 106 | try: 107 | await self._pool.release(self._conn) 108 | finally: 109 | self._pool = None 110 | self._conn = None 111 | 112 | 113 | class _ConnectionContextManager(_ContextManager): 114 | async def __aexit__(self, exc_type, exc, tb): 115 | await self._obj.close() 116 | self._obj = None 117 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import uuid 4 | from concurrent.futures import ThreadPoolExecutor 5 | 6 | import pytest 7 | import pytest_asyncio 8 | 9 | import asyncodbc 10 | 11 | 12 | @pytest_asyncio.fixture 13 | async def conn(connection_maker, database): 14 | connection = await connection_maker() 15 | await connection.execute(f"USE {database};") 16 | await connection.commit() 17 | return connection 18 | 19 | 20 | @pytest.fixture(scope="session") 21 | def event_loop(): 22 | return asyncio.get_event_loop() 23 | 24 | 25 | @pytest_asyncio.fixture(scope="session", 
autouse=True) 26 | async def database(): 27 | connection = await asyncodbc.connect(dsn=os.getenv("TEST_DSN"), autocommit=True) 28 | db = f"test_{uuid.uuid4()}".replace("-", "") 29 | await connection.execute(f"CREATE DATABASE {db};") 30 | yield db 31 | await connection.execute(f"DROP DATABASE {db};") 32 | await connection.close() 33 | 34 | 35 | @pytest.fixture 36 | async def connection_maker(dsn, database): 37 | cleanup = [] 38 | 39 | async def make(**kw): 40 | if kw.get("executor", None) is None: 41 | executor = ThreadPoolExecutor(max_workers=1) 42 | kw["executor"] = executor 43 | else: 44 | executor = kw["executor"] 45 | 46 | conn = await asyncodbc.connect(dsn=dsn, database=database, **kw) 47 | cleanup.append((conn, executor)) 48 | return conn 49 | 50 | try: 51 | yield make 52 | finally: 53 | for conn, executor in cleanup: 54 | await conn.close() 55 | executor.shutdown(True) 56 | 57 | 58 | @pytest_asyncio.fixture 59 | async def pool(dsn): 60 | p = await asyncodbc.create_pool(dsn=dsn) 61 | 62 | try: 63 | yield p 64 | finally: 65 | p.close() 66 | await p.wait_closed() 67 | 68 | 69 | @pytest.fixture 70 | def dsn(): 71 | return os.getenv("TEST_DSN") 72 | 73 | 74 | @pytest_asyncio.fixture 75 | async def pool_maker(): 76 | pool_list = [] 77 | 78 | async def make(**kw): 79 | pool = await asyncodbc.create_pool(**kw) 80 | pool_list.append(pool) 81 | return pool 82 | 83 | try: 84 | yield make 85 | finally: 86 | for pool in pool_list: 87 | pool.close() 88 | await pool.wait_closed() 89 | 90 | 91 | @pytest.fixture 92 | def executor(): 93 | return ThreadPoolExecutor(max_workers=10) 94 | 95 | 96 | @pytest_asyncio.fixture 97 | async def table(conn): 98 | cur = await conn.cursor() 99 | await cur.execute("CREATE TABLE t1(n INT, v VARCHAR(10));") 100 | await cur.execute("INSERT INTO t1 VALUES (1, '123.45');") 101 | await cur.execute("INSERT INTO t1 VALUES (2, 'foo');") 102 | await conn.commit() 103 | await cur.close() 104 | 105 | try: 106 | yield "t1" 107 | finally: 108 | cur = await conn.cursor() 109 | await cur.execute("DROP TABLE t1;") 110 | await cur.commit() 111 | await cur.close() 112 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiomysql.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiomysql.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/aiomysql" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiomysql" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # aiomysql documentation build configuration file, created by 5 | # sphinx-quickstart on Sun Jan 18 22:02:31 2015. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 
12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | 19 | # If extensions (or modules to document with autodoc) are in another directory, 20 | # add these directories to sys.path here. If the directory is relative to the 21 | # documentation root, use os.path.abspath to make it absolute, like shown here. 22 | #sys.path.insert(0, os.path.abspath('.')) 23 | 24 | # -- General configuration ------------------------------------------------ 25 | 26 | # If your documentation needs a minimal Sphinx version, state it here. 27 | #needs_sphinx = '1.0' 28 | 29 | # Add any Sphinx extension module names here, as strings. They can be 30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 31 | # ones. 32 | 33 | import re, os.path 34 | 35 | def get_release(): 36 | regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'") 37 | here = os.path.dirname(__file__) 38 | root = os.path.dirname(here) 39 | init_py = os.path.join(root, 'asyncodbc', '__init__.py') 40 | with open(init_py) as f: 41 | for line in f: 42 | match = regexp.match(line) 43 | if match is not None: 44 | return match.group(1) 45 | else: 46 | raise RuntimeError('Cannot find version in asyncodbc/__init__.py') 47 | 48 | 49 | def get_version(release): 50 | parts = release.split('.') 51 | return '.'.join(parts[:2]) 52 | 53 | extensions = [ 54 | 'sphinx.ext.autodoc', 55 | 'sphinx.ext.intersphinx', 56 | 'sphinx.ext.viewcode', 57 | ] 58 | 59 | intersphinx_mapping = {'python': ('http://docs.python.org/3', None)} 60 | 61 | # Add any paths that contain templates here, relative to this directory. 62 | templates_path = ['_templates'] 63 | 64 | # The suffix of source filenames. 65 | source_suffix = '.rst' 66 | 67 | # The encoding of source files. 68 | #source_encoding = 'utf-8-sig' 69 | 70 | # The master toctree document. 71 | master_doc = 'index' 72 | 73 | # General information about the project. 74 | project = 'asyncodbc' 75 | copyright = '2015,2016 Nikolay Novik' 76 | 77 | # The version info for the project you're documenting, acts as replacement for 78 | # |version| and |release|, also used in various other places throughout the 79 | # built documents. 80 | # 81 | release = get_release() 82 | version = get_version(release) 83 | 84 | # The language for content autogenerated by Sphinx. Refer to documentation 85 | # for a list of supported languages. 86 | #language = None 87 | 88 | # There are two options for replacing |today|: either, you set today to some 89 | # non-false value, then it is used: 90 | #today = '' 91 | # Else, today_fmt is used as the format for a strftime call. 92 | #today_fmt = '%B %d, %Y' 93 | 94 | # List of patterns, relative to source directory, that match files and 95 | # directories to ignore when looking for source files. 96 | exclude_patterns = ['_build'] 97 | 98 | # The reST default role (used for this markup: `text`) to use for all 99 | # documents. 100 | #default_role = None 101 | 102 | # If true, '()' will be appended to :func: etc. cross-reference text. 103 | #add_function_parentheses = True 104 | 105 | # If true, the current module name will be prepended to all description 106 | # unit titles (such as .. function::). 107 | #add_module_names = True 108 | 109 | # If true, sectionauthor and moduleauthor directives will be shown in the 110 | # output. They are ignored by default. 111 | #show_authors = False 112 | 113 | # The name of the Pygments (syntax highlighting) style to use. 
114 | pygments_style = 'sphinx' 115 | 116 | # A list of ignored prefixes for module index sorting. 117 | #modindex_common_prefix = [] 118 | 119 | # If true, keep warnings as "system message" paragraphs in the built documents. 120 | #keep_warnings = False 121 | highlight_language = 'python3' 122 | 123 | # The theme to use for HTML and HTML Help pages. See the documentation for 124 | # a list of builtin themes. 125 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 126 | 127 | if on_rtd: 128 | html_theme = 'default' 129 | else: 130 | html_theme = 'pyramid' 131 | 132 | # -- Options for HTML output ---------------------------------------------- 133 | 134 | # The theme to use for HTML and HTML Help pages. See the documentation for 135 | # a list of builtin themes. 136 | html_theme = 'default' 137 | 138 | # Theme options are theme-specific and customize the look and feel of a theme 139 | # further. For a list of options available for each theme, see the 140 | # documentation. 141 | #html_theme_options = {} 142 | 143 | # Add any paths that contain custom themes here, relative to this directory. 144 | #html_theme_path = [] 145 | 146 | # The name for this set of Sphinx documents. If None, it defaults to 147 | # " v documentation". 148 | #html_title = None 149 | 150 | # A shorter title for the navigation bar. Default is the same as html_title. 151 | #html_short_title = None 152 | 153 | # The name of an image file (relative to this directory) to place at the top 154 | # of the sidebar. 155 | #html_logo = None 156 | 157 | # The name of an image file (within the static path) to use as favicon of the 158 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 159 | # pixels large. 160 | #html_favicon = None 161 | 162 | # Add any paths that contain custom static files (such as style sheets) here, 163 | # relative to this directory. They are copied after the builtin static files, 164 | # so a file named "default.css" will overwrite the builtin "default.css". 165 | html_static_path = ['_static'] 166 | 167 | # Add any extra paths that contain custom files (such as robots.txt or 168 | # .htaccess) here, relative to this directory. These files are copied 169 | # directly to the root of the documentation. 170 | #html_extra_path = [] 171 | 172 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 173 | # using the given strftime format. 174 | #html_last_updated_fmt = '%b %d, %Y' 175 | 176 | # If true, SmartyPants will be used to convert quotes and dashes to 177 | # typographically correct entities. 178 | #html_use_smartypants = True 179 | 180 | # Custom sidebar templates, maps document names to template names. 181 | #html_sidebars = {} 182 | 183 | # Additional templates that should be rendered to pages, maps page names to 184 | # template names. 185 | #html_additional_pages = {} 186 | 187 | # If false, no module index is generated. 188 | #html_domain_indices = True 189 | 190 | # If false, no index is generated. 191 | #html_use_index = True 192 | 193 | # If true, the index is split into individual pages for each letter. 194 | #html_split_index = False 195 | 196 | # If true, links to the reST sources are added to the pages. 197 | #html_show_sourcelink = True 198 | 199 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 200 | #html_show_sphinx = True 201 | 202 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
203 | #html_show_copyright = True 204 | 205 | # If true, an OpenSearch description file will be output, and all pages will 206 | # contain a tag referring to it. The value of this option must be the 207 | # base URL from which the finished HTML is served. 208 | #html_use_opensearch = '' 209 | 210 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 211 | #html_file_suffix = None 212 | 213 | # Output file base name for HTML help builder. 214 | htmlhelp_basename = 'aioodbcdoc' 215 | 216 | 217 | # -- Options for LaTeX output --------------------------------------------- 218 | 219 | latex_elements = { 220 | # The paper size ('letterpaper' or 'a4paper'). 221 | #'papersize': 'letterpaper', 222 | 223 | # The font size ('10pt', '11pt' or '12pt'). 224 | #'pointsize': '10pt', 225 | 226 | # Additional stuff for the LaTeX preamble. 227 | #'preamble': '', 228 | } 229 | 230 | # Grouping the document tree into LaTeX files. List of tuples 231 | # (source start file, target name, title, 232 | # author, documentclass [howto, manual, or own class]). 233 | latex_documents = [ 234 | ('index', 'asyncodbc.tex', 'asyncodbc Documentation', 235 | 'Nikolay Novik', 'manual'), 236 | ] 237 | 238 | # The name of an image file (relative to this directory) to place at the top of 239 | # the title page. 240 | #latex_logo = None 241 | 242 | # For "manual" documents, if this is true, then toplevel headings are parts, 243 | # not chapters. 244 | #latex_use_parts = False 245 | 246 | # If true, show page references after internal links. 247 | #latex_show_pagerefs = False 248 | 249 | # If true, show URL addresses after external links. 250 | #latex_show_urls = False 251 | 252 | # Documents to append as an appendix to all manuals. 253 | #latex_appendices = [] 254 | 255 | # If false, no module index is generated. 256 | #latex_domain_indices = True 257 | 258 | 259 | # -- Options for manual page output --------------------------------------- 260 | 261 | # One entry per manual page. List of tuples 262 | # (source start file, name, description, authors, manual section). 263 | man_pages = [ 264 | ('index', 'asyncodbc', 'asyncodbc Documentation', 265 | ['Nikolay Novik'], 1) 266 | ] 267 | 268 | # If true, show URL addresses after external links. 269 | #man_show_urls = False 270 | 271 | 272 | # -- Options for Texinfo output ------------------------------------------- 273 | 274 | # Grouping the document tree into Texinfo files. List of tuples 275 | # (source start file, target name, title, author, 276 | # dir menu entry, description, category) 277 | texinfo_documents = [ 278 | ('index', 'asyncodbc', 'asyncodbc Documentation', 279 | 'Nikolay Novik', 'asyncodbc', 'One line description of project.', 280 | 'Miscellaneous'), 281 | ] 282 | 283 | # Documents to append as an appendix to all manuals. 284 | #texinfo_appendices = [] 285 | 286 | # If false, no module index is generated. 287 | #texinfo_domain_indices = True 288 | 289 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 290 | #texinfo_show_urls = 'footnote' 291 | 292 | # If true, do not generate a @detailmenu in the "Top" node's menu. 293 | #texinfo_no_detailmenu = False 294 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. _aioodbc-contributing: 2 | 3 | .. 
include:: ../CONTRIBUTING.rst 4 | -------------------------------------------------------------------------------- /docs/examples.rst: -------------------------------------------------------------------------------- 1 | Examples of asyncodbc usage 2 | ========================= 3 | 4 | Below is a list of examples from `asyncodbc/examples 5 | `_ 6 | 7 | Every example is a correct tiny python program. 8 | 9 | .. _aioodbc-examples-simple: 10 | 11 | Basic Usage 12 | ----------- 13 | 14 | Basic example, executes query that return important number 42. 15 | 16 | .. literalinclude:: ../examples/example_simple.py 17 | 18 | Example of query execution in connection pool. 19 | 20 | .. literalinclude:: ../examples/example_pool.py 21 | 22 | Example of using async context managers with Pool, Connection and Cursor 23 | objects. 24 | 25 | .. literalinclude:: ../examples/example_context_managers.py 26 | -------------------------------------------------------------------------------- /docs/glossary.rst: -------------------------------------------------------------------------------- 1 | .. _glossary: 2 | 3 | 4 | ******** 5 | Glossary 6 | ******** 7 | 8 | .. if you add new entries, keep the alphabetical sorting! 9 | 10 | .. glossary:: 11 | 12 | DBAPI 13 | 14 | :pep:`249` -- Python Database API Specification v2.0 15 | 16 | ipdb 17 | 18 | ipdb exports functions to access the IPython debugger, which 19 | features tab completion, syntax highlighting, better tracebacks, 20 | better introspection with the same interface as the pdb module. 21 | 22 | MySQL 23 | 24 | A popular database server. 25 | 26 | http://www.mysql.com/ 27 | 28 | ODBC 29 | 30 | Open Database Connectivity (ODBC) is a standard programming language 31 | middleware application programming interface (API) for accessing 32 | database management systems (DBMS) 33 | 34 | pep8 35 | 36 | Python style guide checker 37 | 38 | *pep8* is a tool to check your Python code against some of the 39 | style conventions in :pep:`8` -- Style Guide for Python Code. 40 | 41 | pyflakes 42 | 43 | passive checker of Python programs 44 | 45 | A simple program which checks Python source files for errors. 46 | 47 | Pyflakes analyzes programs and detects various errors. It works 48 | by parsing the source file, not importing it, so it is safe to 49 | use on modules with side effects. It's also much faster. 50 | 51 | https://pypi.python.org/pypi/pyflakes 52 | 53 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. asyncodbc documentation master file, created by 2 | sphinx-quickstart on Sun Jan 18 22:02:31 2015. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to asyncodbc's documentation! 7 | =================================== 8 | 9 | .. _GitHub: https://github.com/aio-libs/asyncodbc 10 | .. _asyncio: http://docs.python.org/3.4/library/asyncio.html 11 | .. _aiopg: https://github.com/aio-libs/aiopg 12 | .. _aio-libs: https://github.com/aio-libs 13 | .. _pyodbc: https://github.com/mkleehammer/pyodbc 14 | .. _PEP492: https://www.python.org/dev/peps/pep-0492/ 15 | .. _unixODBC: http://www.unixodbc.org/ 16 | .. _threads: http://techspot.zzzeek.org/2015/02/15/asynchronous-python-and-databases/ 17 | 18 | 19 | **asyncodbc** is Python 3.5+ module that makes possible accessing ODBC_ databases 20 | with asyncio_. 
It relies on the awesome pyodbc_ library and preserves the same look and
21 | feel. *asyncodbc* is written with the `async/await` syntax (PEP492_) and is therefore not
22 | compatible with Python versions older than 3.5. Internally, *asyncodbc* employs threads
23 | to avoid blocking the event loop; threads_ are not as bad as you might think :)
24 |
25 |
26 | Features
27 | --------
28 | * Implements an `asyncio` :term:`DBAPI` *like* interface for
29 | :term:`ODBC`. It includes :ref:`asyncodbc-connection`,
30 | :ref:`asyncodbc-cursor` and :ref:`asyncodbc-pool` objects.
31 | * Supports connection pooling.
32 |
33 |
34 | Source code
35 | -----------
36 |
37 | The project is hosted on GitHub_.
38 |
39 | Please feel free to file an issue on the `bug tracker
40 | `_ if you have found a bug
41 | or have a suggestion for improving the library.
42 |
43 | The library uses `Travis `_ for
44 | Continuous Integration and `Coveralls
45 | `_ for
46 | coverage reports.
47 |
48 |
49 | Dependencies
50 | ------------
51 |
52 | - Python 3.5 (PEP492_ coroutines)
53 | - pyodbc_
54 | - unixODBC_
55 |
56 |
57 | Authors and License
58 | -------------------
59 |
60 | The ``asyncodbc`` package is written by Nikolay Novik and aio-libs_ contributors.
61 | It's MIT licensed.
62 |
63 | Feel free to improve this package and send a pull request to GitHub_.
64 |
65 | Contents:
66 | ---------
67 |
68 | .. toctree::
69 | :maxdepth: 2
70 |
71 | examples
72 | tuning
73 | glossary
74 | contributing
75 |
76 | Indices and tables
77 | ==================
78 |
79 | * :ref:`genindex`
80 | * :ref:`modindex`
81 | * :ref:`search`
82 | -------------------------------------------------------------------------------- /docs/make.bat: --------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
31 | echo. text to make text files
32 | echo. man to make manual pages
33 | echo. texinfo to make Texinfo files
34 | echo. gettext to make PO message catalogs
35 | echo. changes to make an overview over all changed/added/deprecated items
36 | echo. xml to make Docutils-native XML files
37 | echo. pseudoxml to make pseudoxml-XML files for display purposes
38 | echo. linkcheck to check all external links for integrity
39 | echo.
doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiomysql.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiomysql.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
155 | goto end
156 | )
157 |
158 | if "%1" == "latexpdfja" (
159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
160 | cd %BUILDDIR%/latex
161 | make all-pdf-ja
162 | cd %BUILDDIR%/..
163 | echo.
164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
165 | goto end
166 | )
167 |
168 | if "%1" == "text" (
169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
170 | if errorlevel 1 exit /b 1
171 | echo.
172 | echo.Build finished. The text files are in %BUILDDIR%/text.
173 | goto end
174 | )
175 |
176 | if "%1" == "man" (
177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
178 | if errorlevel 1 exit /b 1
179 | echo.
180 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
181 | goto end
182 | )
183 |
184 | if "%1" == "texinfo" (
185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
186 | if errorlevel 1 exit /b 1
187 | echo.
188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
189 | goto end
190 | )
191 |
192 | if "%1" == "gettext" (
193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
194 | if errorlevel 1 exit /b 1
195 | echo.
196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
197 | goto end
198 | )
199 |
200 | if "%1" == "changes" (
201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
202 | if errorlevel 1 exit /b 1
203 | echo.
204 | echo.The overview file is in %BUILDDIR%/changes.
205 | goto end
206 | )
207 |
208 | if "%1" == "linkcheck" (
209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
210 | if errorlevel 1 exit /b 1
211 | echo.
212 | echo.Link check complete; look for any errors in the above output ^
213 | or in %BUILDDIR%/linkcheck/output.txt.
214 | goto end
215 | )
216 |
217 | if "%1" == "doctest" (
218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
219 | if errorlevel 1 exit /b 1
220 | echo.
221 | echo.Testing of doctests in the sources finished, look at the ^
222 | results in %BUILDDIR%/doctest/output.txt.
223 | goto end
224 | )
225 |
226 | if "%1" == "xml" (
227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
228 | if errorlevel 1 exit /b 1
229 | echo.
230 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
231 | goto end
232 | )
233 |
234 | if "%1" == "pseudoxml" (
235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
236 | if errorlevel 1 exit /b 1
237 | echo.
238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
239 | goto end
240 | )
241 |
242 | :end
243 | -------------------------------------------------------------------------------- /docs/tuning.rst: --------------------------------------------------------------------------------
1 | .. _tuning:
2 |
3 |
4 | ********************
5 | Configuration Tuning
6 | ********************
7 |
8 |
9 | after_created
10 |
11 | When calling ``asyncodbc.connect`` you can pass an async
12 | unary function as the ``after_created`` parameter. This allows
13 | you to configure additional attributes on the underlying
14 | pyodbc connection, such as ``.setencoding`` or ``.setdecoding``.
15 |
16 | ThreadPoolExecutor
17 |
18 | When using ``asyncodbc.create_pool`` it is considered
19 | good practice to pass a ``ThreadPoolExecutor`` from
20 | ``concurrent.futures`` so that worker threads are
21 | dedicated to database work, leaving the default executor free
22 | for other tasks and preventing competition between database
23 | and default workers. A combined sketch is shown below.
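Combined example

    The following is a minimal, illustrative sketch that combines both
    options. It reuses the SQLite DSN from the bundled examples and assumes
    that ``create_pool`` forwards extra keyword arguments (``executor``,
    ``after_created``) to ``asyncodbc.connect``; adjust the DSN, encoding
    and pool sizes for your own environment.

    .. code-block:: python

        import asyncio
        from concurrent.futures import ThreadPoolExecutor

        import asyncodbc

        dsn = "Driver=SQLite;Database=sqlite.db"


        async def after_created(connection):
            # ``connection`` is the underlying pyodbc connection; tune
            # attributes that asyncodbc does not expose directly.
            connection.setencoding(encoding="utf-8")


        async def main():
            # Dedicated worker threads keep database calls off the
            # default executor.
            executor = ThreadPoolExecutor(max_workers=4)
            async with asyncodbc.create_pool(
                dsn=dsn, executor=executor, after_created=after_created
            ) as pool:
                async with pool.acquire() as conn:
                    async with conn.cursor() as cur:
                        await cur.execute("SELECT 42;")
                        print(await cur.fetchone())


        asyncio.run(main())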
24 | -------------------------------------------------------------------------------- /examples/example_complex_queries.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from functools import partial 3 | 4 | import asyncodbc 5 | 6 | dsn = "Driver=SQLite;Database=sqlite.db" 7 | 8 | 9 | # Sometimes you may want to reuse same connection parameters multiple times. 10 | # This can be accomplished in a way below using partial function 11 | connect = partial(asyncodbc.connect, dsn=dsn, echo=True, autocommit=True) 12 | 13 | 14 | async def init_database(): 15 | """ 16 | Initialize test database with sample schema/data to reuse in other tests. 17 | Make sure that in real applications you have database initialization 18 | file as separate *.sql script or rely on autogenerated code provided 19 | by your ORM. 20 | """ 21 | async with connect(loop=loop) as conn: 22 | async with conn.cursor() as cur: 23 | sql = "CREATE TABLE IF NOT EXISTS t1(n INTEGER, v TEXT);" 24 | await cur.execute(sql) 25 | 26 | 27 | async def error_without_context_managers(): 28 | """ 29 | When not using context manager you may end up having unclosed connections 30 | in case of any error which lead to resource leakage. To avoid 31 | `Unclosed connection` errors in your code always close after yourself. 32 | """ 33 | conn = await asyncodbc.connect(dsn=dsn) 34 | cur = await conn.cursor() 35 | 36 | try: 37 | await cur.execute("SELECT 42 AS;") 38 | rows = await cur.fetchall() 39 | print(rows) 40 | except Exception: 41 | pass 42 | finally: 43 | await cur.close() 44 | await conn.close() 45 | 46 | 47 | async def insert_with_values(): 48 | """ 49 | When providing data to your SQL statement make sure to parametrize it with 50 | question marks placeholders. Do not use string formatting or make sure 51 | your data is escaped to prevent sql injections. 52 | 53 | NOTE: pyodbc does not support named placeholders syntax. 54 | """ 55 | async with connect(loop=loop) as conn: 56 | async with conn.cursor() as cur: 57 | # Substitute sql markers with variables 58 | await cur.execute("INSERT INTO t1(n, v) VALUES(?, ?);", ("2", "test 2")) 59 | # NOTE: make sure to pass variables as tuple of strings even if 60 | # your data types are different to prevent 61 | # pyodbc.ProgrammingError errors. You can even do like this 62 | values = (3, "test 3") 63 | await cur.execute("INSERT INTO t1(n, v) VALUES(?, ?);", *map(str, values)) 64 | 65 | # Retrieve id of last inserted row 66 | await cur.execute("SELECT last_insert_rowid();") 67 | result = await cur.fetchone() 68 | print(result[0]) 69 | 70 | 71 | async def commit(): 72 | """ 73 | When not using `autocommit` parameter do not forget to explicitly call 74 | this method for your changes to persist within database. 
75 | """ 76 | async with asyncodbc.connect(dsn=dsn, loop=loop) as conn: 77 | async with conn.cursor() as cur: 78 | sql = 'INSERT INTO t1 VALUES(1, "test");' 79 | await cur.execute(sql) 80 | # Make sure your changes will be actually saved into database 81 | await cur.commit() 82 | 83 | async with asyncodbc.connect(dsn=dsn, loop=loop) as conn: 84 | async with conn.cursor() as cur: 85 | sql_select = "SELECT * FROM t1;" 86 | await cur.execute(sql_select) 87 | # At this point without autocommiting you will not see 88 | # the data inserted above 89 | print(await cur.fetchone()) 90 | 91 | 92 | if __name__ == "__main__": 93 | loop = asyncio.get_event_loop() 94 | loop.run_until_complete(init_database()) 95 | loop.run_until_complete(commit()) 96 | loop.run_until_complete(insert_with_values()) 97 | loop.run_until_complete(error_without_context_managers()) 98 | -------------------------------------------------------------------------------- /examples/example_context_managers.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import asyncodbc 4 | 5 | 6 | async def example(): 7 | dsn = "Driver=SQLite;Database=sqlite.db" 8 | 9 | async with asyncodbc.create_pool(dsn=dsn) as pool: 10 | async with pool.acquire() as conn: 11 | async with conn.cursor() as cur: 12 | await cur.execute("SELECT 42 AS age;") 13 | val = await cur.fetchone() 14 | print(val) 15 | print(val.age) 16 | 17 | 18 | if __name__ == "__main__": 19 | asyncio.run(example()) 20 | -------------------------------------------------------------------------------- /examples/example_pool.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import asyncodbc 4 | 5 | 6 | async def pool(): 7 | dsn = "Driver=SQLite;Database=sqlite.db" 8 | pool = await asyncodbc.create_pool(dsn=dsn) 9 | 10 | async with pool.acquire() as conn: 11 | cur = await conn.cursor() 12 | await cur.execute("SELECT 42;") 13 | r = await cur.fetchall() 14 | print(r) 15 | await cur.close() 16 | await conn.close() 17 | pool.close() 18 | await pool.wait_closed() 19 | 20 | 21 | if __name__ == "__main__": 22 | asyncio.run(pool()) 23 | -------------------------------------------------------------------------------- /examples/example_simple.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import asyncodbc 4 | 5 | 6 | async def example(): 7 | dsn = "Driver=SQLite;Database=sqlite.db" 8 | conn = await asyncodbc.connect( 9 | dsn=dsn, 10 | ) 11 | 12 | cur = await conn.cursor() 13 | await cur.execute("SELECT 42 AS age;") 14 | rows = await cur.fetchall() 15 | print(rows) 16 | print(rows[0]) 17 | print(rows[0].age) 18 | await cur.close() 19 | await conn.close() 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(example()) 24 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "astroid" 3 | version = "2.11.2" 4 | description = "An abstract syntax tree for Python with inference support." 
5 | category = "dev" 6 | optional = false 7 | python-versions = ">=3.6.2" 8 | 9 | [package.dependencies] 10 | lazy-object-proxy = ">=1.4.0" 11 | typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} 12 | typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} 13 | wrapt = ">=1.11,<2" 14 | 15 | [[package]] 16 | name = "atomicwrites" 17 | version = "1.4.0" 18 | description = "Atomic file writes." 19 | category = "dev" 20 | optional = false 21 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 22 | 23 | [[package]] 24 | name = "attrs" 25 | version = "21.4.0" 26 | description = "Classes Without Boilerplate" 27 | category = "dev" 28 | optional = false 29 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 30 | 31 | [package.extras] 32 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] 33 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 34 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] 35 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] 36 | 37 | [[package]] 38 | name = "bandit" 39 | version = "1.7.4" 40 | description = "Security oriented static analyser for python code." 41 | category = "dev" 42 | optional = false 43 | python-versions = ">=3.7" 44 | 45 | [package.dependencies] 46 | colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} 47 | GitPython = ">=1.0.1" 48 | PyYAML = ">=5.3.1" 49 | stevedore = ">=1.20.0" 50 | 51 | [package.extras] 52 | test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] 53 | toml = ["toml"] 54 | yaml = ["pyyaml"] 55 | 56 | [[package]] 57 | name = "black" 58 | version = "22.3.0" 59 | description = "The uncompromising code formatter." 60 | category = "dev" 61 | optional = false 62 | python-versions = ">=3.6.2" 63 | 64 | [package.dependencies] 65 | click = ">=8.0.0" 66 | mypy-extensions = ">=0.4.3" 67 | pathspec = ">=0.9.0" 68 | platformdirs = ">=2" 69 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 70 | typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} 71 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} 72 | 73 | [package.extras] 74 | colorama = ["colorama (>=0.4.3)"] 75 | d = ["aiohttp (>=3.7.4)"] 76 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 77 | uvloop = ["uvloop (>=0.15.2)"] 78 | 79 | [[package]] 80 | name = "bleach" 81 | version = "5.0.0" 82 | description = "An easy safelist-based HTML-sanitizing tool." 
83 | category = "dev" 84 | optional = false 85 | python-versions = ">=3.7" 86 | 87 | [package.dependencies] 88 | six = ">=1.9.0" 89 | webencodings = "*" 90 | 91 | [package.extras] 92 | css = ["tinycss2 (>=1.1.0)"] 93 | dev = ["pip-tools (==6.5.1)", "pytest (==7.1.1)", "flake8 (==4.0.1)", "tox (==3.24.5)", "sphinx (==4.3.2)", "twine (==4.0.0)", "wheel (==0.37.1)", "hashin (==0.17.0)", "black (==22.3.0)", "mypy (==0.942)"] 94 | 95 | [[package]] 96 | name = "certifi" 97 | version = "2021.10.8" 98 | description = "Python package for providing Mozilla's CA Bundle." 99 | category = "dev" 100 | optional = false 101 | python-versions = "*" 102 | 103 | [[package]] 104 | name = "cffi" 105 | version = "1.15.0" 106 | description = "Foreign Function Interface for Python calling C code." 107 | category = "dev" 108 | optional = false 109 | python-versions = "*" 110 | 111 | [package.dependencies] 112 | pycparser = "*" 113 | 114 | [[package]] 115 | name = "charset-normalizer" 116 | version = "2.0.12" 117 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 118 | category = "dev" 119 | optional = false 120 | python-versions = ">=3.5.0" 121 | 122 | [package.extras] 123 | unicode_backport = ["unicodedata2"] 124 | 125 | [[package]] 126 | name = "click" 127 | version = "8.1.2" 128 | description = "Composable command line interface toolkit" 129 | category = "dev" 130 | optional = false 131 | python-versions = ">=3.7" 132 | 133 | [package.dependencies] 134 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 135 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 136 | 137 | [[package]] 138 | name = "colorama" 139 | version = "0.4.4" 140 | description = "Cross-platform colored terminal text." 141 | category = "dev" 142 | optional = false 143 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 144 | 145 | [[package]] 146 | name = "commonmark" 147 | version = "0.9.1" 148 | description = "Python parser for the CommonMark Markdown spec" 149 | category = "dev" 150 | optional = false 151 | python-versions = "*" 152 | 153 | [package.extras] 154 | test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] 155 | 156 | [[package]] 157 | name = "coverage" 158 | version = "6.3.2" 159 | description = "Code coverage measurement for Python" 160 | category = "dev" 161 | optional = false 162 | python-versions = ">=3.7" 163 | 164 | [package.dependencies] 165 | tomli = {version = "*", optional = true, markers = "extra == \"toml\""} 166 | 167 | [package.extras] 168 | toml = ["tomli"] 169 | 170 | [[package]] 171 | name = "coveralls" 172 | version = "3.3.1" 173 | description = "Show coverage stats online via coveralls.io" 174 | category = "dev" 175 | optional = false 176 | python-versions = ">= 3.5" 177 | 178 | [package.dependencies] 179 | coverage = ">=4.1,<6.0.0 || >6.1,<6.1.1 || >6.1.1,<7.0" 180 | docopt = ">=0.6.1" 181 | requests = ">=1.0.0" 182 | 183 | [package.extras] 184 | yaml = ["PyYAML (>=3.10)"] 185 | 186 | [[package]] 187 | name = "cryptography" 188 | version = "36.0.2" 189 | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
190 | category = "dev" 191 | optional = false 192 | python-versions = ">=3.6" 193 | 194 | [package.dependencies] 195 | cffi = ">=1.12" 196 | 197 | [package.extras] 198 | docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] 199 | docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] 200 | pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] 201 | sdist = ["setuptools_rust (>=0.11.4)"] 202 | ssh = ["bcrypt (>=3.1.5)"] 203 | test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] 204 | 205 | [[package]] 206 | name = "darglint" 207 | version = "1.8.1" 208 | description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 209 | category = "dev" 210 | optional = false 211 | python-versions = ">=3.6,<4.0" 212 | 213 | [[package]] 214 | name = "dill" 215 | version = "0.3.4" 216 | description = "serialize all of python" 217 | category = "dev" 218 | optional = false 219 | python-versions = ">=2.7, !=3.0.*" 220 | 221 | [package.extras] 222 | graph = ["objgraph (>=1.7.2)"] 223 | 224 | [[package]] 225 | name = "docopt" 226 | version = "0.6.2" 227 | description = "Pythonic argument parser, that will make you smile" 228 | category = "dev" 229 | optional = false 230 | python-versions = "*" 231 | 232 | [[package]] 233 | name = "docutils" 234 | version = "0.18.1" 235 | description = "Docutils -- Python Documentation Utilities" 236 | category = "dev" 237 | optional = false 238 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 239 | 240 | [[package]] 241 | name = "execnet" 242 | version = "1.9.0" 243 | description = "execnet: rapid multi-Python deployment" 244 | category = "dev" 245 | optional = false 246 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 247 | 248 | [package.extras] 249 | testing = ["pre-commit"] 250 | 251 | [[package]] 252 | name = "flake8" 253 | version = "4.0.1" 254 | description = "the modular source code checker: pep8 pyflakes and co" 255 | category = "dev" 256 | optional = false 257 | python-versions = ">=3.6" 258 | 259 | [package.dependencies] 260 | importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} 261 | mccabe = ">=0.6.0,<0.7.0" 262 | pycodestyle = ">=2.8.0,<2.9.0" 263 | pyflakes = ">=2.4.0,<2.5.0" 264 | 265 | [[package]] 266 | name = "flake8-comprehensions" 267 | version = "3.8.0" 268 | description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
269 | category = "dev" 270 | optional = false 271 | python-versions = ">=3.7" 272 | 273 | [package.dependencies] 274 | flake8 = ">=3.0,<3.2.0 || >3.2.0" 275 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 276 | 277 | [[package]] 278 | name = "gitdb" 279 | version = "4.0.9" 280 | description = "Git Object Database" 281 | category = "dev" 282 | optional = false 283 | python-versions = ">=3.6" 284 | 285 | [package.dependencies] 286 | smmap = ">=3.0.1,<6" 287 | 288 | [[package]] 289 | name = "gitpython" 290 | version = "3.1.27" 291 | description = "GitPython is a python library used to interact with Git repositories" 292 | category = "dev" 293 | optional = false 294 | python-versions = ">=3.7" 295 | 296 | [package.dependencies] 297 | gitdb = ">=4.0.1,<5" 298 | typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} 299 | 300 | [[package]] 301 | name = "idna" 302 | version = "3.3" 303 | description = "Internationalized Domain Names in Applications (IDNA)" 304 | category = "dev" 305 | optional = false 306 | python-versions = ">=3.5" 307 | 308 | [[package]] 309 | name = "importlib-metadata" 310 | version = "4.2.0" 311 | description = "Read metadata from Python packages" 312 | category = "dev" 313 | optional = false 314 | python-versions = ">=3.6" 315 | 316 | [package.dependencies] 317 | typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} 318 | zipp = ">=0.5" 319 | 320 | [package.extras] 321 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 322 | testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] 323 | 324 | [[package]] 325 | name = "iniconfig" 326 | version = "1.1.1" 327 | description = "iniconfig: brain-dead simple config-ini parsing" 328 | category = "dev" 329 | optional = false 330 | python-versions = "*" 331 | 332 | [[package]] 333 | name = "isort" 334 | version = "5.10.1" 335 | description = "A Python utility / library to sort Python imports." 336 | category = "dev" 337 | optional = false 338 | python-versions = ">=3.6.1,<4.0" 339 | 340 | [package.extras] 341 | pipfile_deprecated_finder = ["pipreqs", "requirementslib"] 342 | requirements_deprecated_finder = ["pipreqs", "pip-api"] 343 | colors = ["colorama (>=0.4.3,<0.5.0)"] 344 | plugins = ["setuptools"] 345 | 346 | [[package]] 347 | name = "jeepney" 348 | version = "0.8.0" 349 | description = "Low-level, pure Python DBus protocol wrapper." 350 | category = "dev" 351 | optional = false 352 | python-versions = ">=3.7" 353 | 354 | [package.extras] 355 | test = ["pytest", "pytest-trio", "pytest-asyncio (>=0.17)", "testpath", "trio", "async-timeout"] 356 | trio = ["trio", "async-generator"] 357 | 358 | [[package]] 359 | name = "keyring" 360 | version = "23.5.0" 361 | description = "Store and access your passwords safely." 
362 | category = "dev" 363 | optional = false 364 | python-versions = ">=3.7" 365 | 366 | [package.dependencies] 367 | importlib-metadata = ">=3.6" 368 | jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} 369 | pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_platform == \"win32\""} 370 | SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} 371 | 372 | [package.extras] 373 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] 374 | testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] 375 | 376 | [[package]] 377 | name = "lazy-object-proxy" 378 | version = "1.7.1" 379 | description = "A fast and thorough lazy object proxy." 380 | category = "dev" 381 | optional = false 382 | python-versions = ">=3.6" 383 | 384 | [[package]] 385 | name = "mccabe" 386 | version = "0.6.1" 387 | description = "McCabe checker, plugin for flake8" 388 | category = "dev" 389 | optional = false 390 | python-versions = "*" 391 | 392 | [[package]] 393 | name = "mypy" 394 | version = "0.942" 395 | description = "Optional static typing for Python" 396 | category = "dev" 397 | optional = false 398 | python-versions = ">=3.6" 399 | 400 | [package.dependencies] 401 | mypy-extensions = ">=0.4.3" 402 | tomli = ">=1.1.0" 403 | typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} 404 | typing-extensions = ">=3.10" 405 | 406 | [package.extras] 407 | dmypy = ["psutil (>=4.0)"] 408 | python2 = ["typed-ast (>=1.4.0,<2)"] 409 | reports = ["lxml"] 410 | 411 | [[package]] 412 | name = "mypy-extensions" 413 | version = "0.4.3" 414 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 415 | category = "dev" 416 | optional = false 417 | python-versions = "*" 418 | 419 | [[package]] 420 | name = "packaging" 421 | version = "21.3" 422 | description = "Core utilities for Python packages" 423 | category = "dev" 424 | optional = false 425 | python-versions = ">=3.6" 426 | 427 | [package.dependencies] 428 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 429 | 430 | [[package]] 431 | name = "pathspec" 432 | version = "0.9.0" 433 | description = "Utility library for gitignore style pattern matching of file paths." 434 | category = "dev" 435 | optional = false 436 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 437 | 438 | [[package]] 439 | name = "pbr" 440 | version = "5.8.1" 441 | description = "Python Build Reasonableness" 442 | category = "dev" 443 | optional = false 444 | python-versions = ">=2.6" 445 | 446 | [[package]] 447 | name = "pkginfo" 448 | version = "1.8.2" 449 | description = "Query metadatdata from sdists / bdists / installed packages." 450 | category = "dev" 451 | optional = false 452 | python-versions = "*" 453 | 454 | [package.extras] 455 | testing = ["coverage", "nose"] 456 | 457 | [[package]] 458 | name = "platformdirs" 459 | version = "2.5.2" 460 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
461 | category = "dev" 462 | optional = false 463 | python-versions = ">=3.7" 464 | 465 | [package.extras] 466 | docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] 467 | test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] 468 | 469 | [[package]] 470 | name = "pluggy" 471 | version = "1.0.0" 472 | description = "plugin and hook calling mechanisms for python" 473 | category = "dev" 474 | optional = false 475 | python-versions = ">=3.6" 476 | 477 | [package.dependencies] 478 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 479 | 480 | [package.extras] 481 | dev = ["pre-commit", "tox"] 482 | testing = ["pytest", "pytest-benchmark"] 483 | 484 | [[package]] 485 | name = "py" 486 | version = "1.11.0" 487 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 488 | category = "dev" 489 | optional = false 490 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 491 | 492 | [[package]] 493 | name = "pycodestyle" 494 | version = "2.8.0" 495 | description = "Python style guide checker" 496 | category = "dev" 497 | optional = false 498 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 499 | 500 | [[package]] 501 | name = "pycparser" 502 | version = "2.21" 503 | description = "C parser in Python" 504 | category = "dev" 505 | optional = false 506 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 507 | 508 | [[package]] 509 | name = "pyflakes" 510 | version = "2.4.0" 511 | description = "passive checker of Python programs" 512 | category = "dev" 513 | optional = false 514 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 515 | 516 | [[package]] 517 | name = "pygments" 518 | version = "2.11.2" 519 | description = "Pygments is a syntax highlighting package written in Python." 
520 | category = "dev" 521 | optional = false 522 | python-versions = ">=3.5" 523 | 524 | [[package]] 525 | name = "pylint" 526 | version = "2.13.5" 527 | description = "python code static checker" 528 | category = "dev" 529 | optional = false 530 | python-versions = ">=3.6.2" 531 | 532 | [package.dependencies] 533 | astroid = ">=2.11.2,<=2.12.0-dev0" 534 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 535 | dill = ">=0.2" 536 | isort = ">=4.2.5,<6" 537 | mccabe = ">=0.6,<0.8" 538 | platformdirs = ">=2.2.0" 539 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 540 | typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} 541 | 542 | [package.extras] 543 | testutil = ["gitpython (>3)"] 544 | 545 | [[package]] 546 | name = "pyodbc" 547 | version = "4.0.32" 548 | description = "DB API Module for ODBC" 549 | category = "main" 550 | optional = false 551 | python-versions = "*" 552 | 553 | [[package]] 554 | name = "pyparsing" 555 | version = "3.0.8" 556 | description = "pyparsing module - Classes and methods to define and execute parsing grammars" 557 | category = "dev" 558 | optional = false 559 | python-versions = ">=3.6.8" 560 | 561 | [package.extras] 562 | diagrams = ["railroad-diagrams", "jinja2"] 563 | 564 | [[package]] 565 | name = "pyproject-flake8" 566 | version = "0.0.1a4" 567 | description = "pyproject-flake8 (`pflake8`), a monkey patching wrapper to connect flake8 with pyproject.toml configuration" 568 | category = "dev" 569 | optional = false 570 | python-versions = "*" 571 | 572 | [package.dependencies] 573 | flake8 = "*" 574 | tomli = {version = "*", markers = "python_version < \"3.11\""} 575 | 576 | [[package]] 577 | name = "pytest" 578 | version = "7.1.1" 579 | description = "pytest: simple powerful testing with Python" 580 | category = "dev" 581 | optional = false 582 | python-versions = ">=3.7" 583 | 584 | [package.dependencies] 585 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 586 | attrs = ">=19.2.0" 587 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 588 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 589 | iniconfig = "*" 590 | packaging = "*" 591 | pluggy = ">=0.12,<2.0" 592 | py = ">=1.8.2" 593 | tomli = ">=1.0.0" 594 | 595 | [package.extras] 596 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] 597 | 598 | [[package]] 599 | name = "pytest-asyncio" 600 | version = "0.18.3" 601 | description = "Pytest support for asyncio" 602 | category = "dev" 603 | optional = false 604 | python-versions = ">=3.7" 605 | 606 | [package.dependencies] 607 | pytest = ">=6.1.0" 608 | typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} 609 | 610 | [package.extras] 611 | testing = ["coverage (==6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (==0.931)", "pytest-trio (>=0.7.0)"] 612 | 613 | [[package]] 614 | name = "pytest-cov" 615 | version = "3.0.0" 616 | description = "Pytest plugin for measuring coverage." 
617 | category = "dev" 618 | optional = false 619 | python-versions = ">=3.6" 620 | 621 | [package.dependencies] 622 | coverage = {version = ">=5.2.1", extras = ["toml"]} 623 | pytest = ">=4.6" 624 | 625 | [package.extras] 626 | testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] 627 | 628 | [[package]] 629 | name = "pytest-forked" 630 | version = "1.4.0" 631 | description = "run tests in isolated forked subprocesses" 632 | category = "dev" 633 | optional = false 634 | python-versions = ">=3.6" 635 | 636 | [package.dependencies] 637 | py = "*" 638 | pytest = ">=3.10" 639 | 640 | [[package]] 641 | name = "pytest-xdist" 642 | version = "2.5.0" 643 | description = "pytest xdist plugin for distributed testing and loop-on-failing modes" 644 | category = "dev" 645 | optional = false 646 | python-versions = ">=3.6" 647 | 648 | [package.dependencies] 649 | execnet = ">=1.1" 650 | pytest = ">=6.2.0" 651 | pytest-forked = "*" 652 | 653 | [package.extras] 654 | psutil = ["psutil (>=3.0)"] 655 | setproctitle = ["setproctitle"] 656 | testing = ["filelock"] 657 | 658 | [[package]] 659 | name = "pywin32-ctypes" 660 | version = "0.2.0" 661 | description = "" 662 | category = "dev" 663 | optional = false 664 | python-versions = "*" 665 | 666 | [[package]] 667 | name = "pyyaml" 668 | version = "6.0" 669 | description = "YAML parser and emitter for Python" 670 | category = "dev" 671 | optional = false 672 | python-versions = ">=3.6" 673 | 674 | [[package]] 675 | name = "readme-renderer" 676 | version = "34.0" 677 | description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" 678 | category = "dev" 679 | optional = false 680 | python-versions = ">=3.6" 681 | 682 | [package.dependencies] 683 | bleach = ">=2.1.0" 684 | docutils = ">=0.13.1" 685 | Pygments = ">=2.5.1" 686 | 687 | [package.extras] 688 | md = ["cmarkgfm (>=0.8.0)"] 689 | 690 | [[package]] 691 | name = "requests" 692 | version = "2.27.1" 693 | description = "Python HTTP for Humans." 
694 | category = "dev" 695 | optional = false 696 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 697 | 698 | [package.dependencies] 699 | certifi = ">=2017.4.17" 700 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} 701 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} 702 | urllib3 = ">=1.21.1,<1.27" 703 | 704 | [package.extras] 705 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 706 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] 707 | 708 | [[package]] 709 | name = "requests-toolbelt" 710 | version = "0.9.1" 711 | description = "A utility belt for advanced users of python-requests" 712 | category = "dev" 713 | optional = false 714 | python-versions = "*" 715 | 716 | [package.dependencies] 717 | requests = ">=2.0.1,<3.0.0" 718 | 719 | [[package]] 720 | name = "rfc3986" 721 | version = "2.0.0" 722 | description = "Validating URI References per RFC 3986" 723 | category = "dev" 724 | optional = false 725 | python-versions = ">=3.7" 726 | 727 | [package.extras] 728 | idna2008 = ["idna"] 729 | 730 | [[package]] 731 | name = "rich" 732 | version = "12.2.0" 733 | description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" 734 | category = "dev" 735 | optional = false 736 | python-versions = ">=3.6.3,<4.0.0" 737 | 738 | [package.dependencies] 739 | commonmark = ">=0.9.0,<0.10.0" 740 | pygments = ">=2.6.0,<3.0.0" 741 | typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} 742 | 743 | [package.extras] 744 | jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] 745 | 746 | [[package]] 747 | name = "secretstorage" 748 | version = "3.3.2" 749 | description = "Python bindings to FreeDesktop.org Secret Service API" 750 | category = "dev" 751 | optional = false 752 | python-versions = ">=3.6" 753 | 754 | [package.dependencies] 755 | cryptography = ">=2.0" 756 | jeepney = ">=0.6" 757 | 758 | [[package]] 759 | name = "six" 760 | version = "1.16.0" 761 | description = "Python 2 and 3 compatibility utilities" 762 | category = "dev" 763 | optional = false 764 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 765 | 766 | [[package]] 767 | name = "smmap" 768 | version = "5.0.0" 769 | description = "A pure Python implementation of a sliding window memory map manager" 770 | category = "dev" 771 | optional = false 772 | python-versions = ">=3.6" 773 | 774 | [[package]] 775 | name = "stevedore" 776 | version = "3.5.0" 777 | description = "Manage dynamic plugins for Python applications" 778 | category = "dev" 779 | optional = false 780 | python-versions = ">=3.6" 781 | 782 | [package.dependencies] 783 | importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} 784 | pbr = ">=2.0.0,<2.1.0 || >2.1.0" 785 | 786 | [[package]] 787 | name = "tomli" 788 | version = "2.0.1" 789 | description = "A lil' TOML parser" 790 | category = "dev" 791 | optional = false 792 | python-versions = ">=3.7" 793 | 794 | [[package]] 795 | name = "twine" 796 | version = "4.0.0" 797 | description = "Collection of utilities for publishing packages on PyPI" 798 | category = "dev" 799 | optional = false 800 | python-versions = ">=3.7" 801 | 802 | [package.dependencies] 803 | importlib-metadata = ">=3.6" 804 | keyring = ">=15.1" 805 | pkginfo = ">=1.8.1" 806 | readme-renderer = ">=21.0" 807 | requests = ">=2.20" 808 | requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" 809 | rfc3986 = ">=1.4.0" 810 | rich = ">=12.0.0" 811 | urllib3 = ">=1.26.0" 812 | 813 
| [[package]] 814 | name = "typed-ast" 815 | version = "1.5.3" 816 | description = "a fork of Python 2 and 3 ast modules with type comment support" 817 | category = "dev" 818 | optional = false 819 | python-versions = ">=3.6" 820 | 821 | [[package]] 822 | name = "typing-extensions" 823 | version = "4.2.0" 824 | description = "Backported and Experimental Type Hints for Python 3.7+" 825 | category = "dev" 826 | optional = false 827 | python-versions = ">=3.7" 828 | 829 | [[package]] 830 | name = "urllib3" 831 | version = "1.26.9" 832 | description = "HTTP library with thread-safe connection pooling, file post, and more." 833 | category = "dev" 834 | optional = false 835 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 836 | 837 | [package.extras] 838 | brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] 839 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 840 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 841 | 842 | [[package]] 843 | name = "webencodings" 844 | version = "0.5.1" 845 | description = "Character encoding aliases for legacy web content" 846 | category = "dev" 847 | optional = false 848 | python-versions = "*" 849 | 850 | [[package]] 851 | name = "wrapt" 852 | version = "1.14.0" 853 | description = "Module for decorators, wrappers and monkey patching." 854 | category = "dev" 855 | optional = false 856 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 857 | 858 | [[package]] 859 | name = "zipp" 860 | version = "3.8.0" 861 | description = "Backport of pathlib-compatible object wrapper for zip files" 862 | category = "dev" 863 | optional = false 864 | python-versions = ">=3.7" 865 | 866 | [package.extras] 867 | docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] 868 | testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] 869 | 870 | [metadata] 871 | lock-version = "1.1" 872 | python-versions = "^3.7" 873 | content-hash = "5138f29ba5620622bf7cb706bbe949264e6c218e847193c8bcb562bb804d7efb" 874 | 875 | [metadata.files] 876 | astroid = [ 877 | {file = "astroid-2.11.2-py3-none-any.whl", hash = "sha256:cc8cc0d2d916c42d0a7c476c57550a4557a083081976bf42a73414322a6411d9"}, 878 | {file = "astroid-2.11.2.tar.gz", hash = "sha256:8d0a30fe6481ce919f56690076eafbb2fb649142a89dc874f1ec0e7a011492d0"}, 879 | ] 880 | atomicwrites = [ 881 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 882 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 883 | ] 884 | attrs = [ 885 | {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, 886 | {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, 887 | ] 888 | bandit = [ 889 | {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, 890 | {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, 891 | ] 892 | black = [ 893 | {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, 894 | {file = 
"black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, 895 | {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, 896 | {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, 897 | {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, 898 | {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, 899 | {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, 900 | {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, 901 | {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, 902 | {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, 903 | {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, 904 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, 905 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, 906 | {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, 907 | {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, 908 | {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, 909 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, 910 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, 911 | {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, 912 | {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, 913 | {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, 914 | {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, 915 | {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, 916 | ] 917 | bleach = [ 918 | {file = "bleach-5.0.0-py3-none-any.whl", hash = "sha256:08a1fe86d253b5c88c92cc3d810fd8048a16d15762e1e5b74d502256e5926aa1"}, 919 | {file = "bleach-5.0.0.tar.gz", hash = "sha256:c6d6cc054bdc9c83b48b8083e236e5f00f238428666d2ce2e083eaa5fd568565"}, 920 | ] 921 | certifi = [ 922 
| {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, 923 | {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, 924 | ] 925 | cffi = [ 926 | {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, 927 | {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, 928 | {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, 929 | {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, 930 | {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, 931 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, 932 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, 933 | {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, 934 | {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, 935 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, 936 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, 937 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, 938 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, 939 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, 940 | {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, 941 | {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, 942 | {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, 943 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, 944 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, 945 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, 946 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, 947 | {file = 
"cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, 948 | {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, 949 | {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, 950 | {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, 951 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, 952 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, 953 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, 954 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, 955 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, 956 | {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, 957 | {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, 958 | {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, 959 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, 960 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, 961 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, 962 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, 963 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, 964 | {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, 965 | {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, 966 | {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, 967 | {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, 968 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, 969 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, 970 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, 971 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, 972 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, 973 | {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, 974 | {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, 975 | {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, 976 | ] 977 | charset-normalizer = [ 978 | {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, 979 | {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, 980 | ] 981 | click = [ 982 | {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, 983 | {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, 984 | ] 985 | colorama = [ 986 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 987 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 988 | ] 989 | commonmark = [ 990 | {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, 991 | {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, 992 | ] 993 | coverage = [ 994 | {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"}, 995 | {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"}, 996 | {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"}, 997 | {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"}, 998 | {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"}, 999 | {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"}, 1000 | {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"}, 1001 | {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"}, 1002 | {file = 
"coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"}, 1003 | {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"}, 1004 | {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"}, 1005 | {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"}, 1006 | {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"}, 1007 | {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"}, 1008 | {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"}, 1009 | {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"}, 1010 | {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"}, 1011 | {file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"}, 1012 | {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"}, 1013 | {file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"}, 1014 | {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"}, 1015 | {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"}, 1016 | {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"}, 1017 | {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"}, 1018 | {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"}, 1019 | {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"}, 1020 | {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"}, 1021 | {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"}, 1022 | {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"}, 1023 | {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"}, 1024 | {file = 
"coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"}, 1025 | {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"}, 1026 | {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"}, 1027 | {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"}, 1028 | {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"}, 1029 | {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"}, 1030 | {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"}, 1031 | {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"}, 1032 | {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"}, 1033 | {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, 1034 | {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, 1035 | ] 1036 | coveralls = [ 1037 | {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"}, 1038 | {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"}, 1039 | ] 1040 | cryptography = [ 1041 | {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:4e2dddd38a5ba733be6a025a1475a9f45e4e41139d1321f412c6b360b19070b6"}, 1042 | {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:4881d09298cd0b669bb15b9cfe6166f16fc1277b4ed0d04a22f3d6430cb30f1d"}, 1043 | {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea634401ca02367c1567f012317502ef3437522e2fc44a3ea1844de028fa4b84"}, 1044 | {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7be666cc4599b415f320839e36367b273db8501127b38316f3b9f22f17a0b815"}, 1045 | {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8241cac0aae90b82d6b5c443b853723bcc66963970c67e56e71a2609dc4b5eaf"}, 1046 | {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2d54e787a884ffc6e187262823b6feb06c338084bbe80d45166a1cb1c6c5bf"}, 1047 | {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:c2c5250ff0d36fd58550252f54915776940e4e866f38f3a7866d92b32a654b86"}, 1048 | {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ec6597aa85ce03f3e507566b8bcdf9da2227ec86c4266bd5e6ab4d9e0cc8dab2"}, 1049 | {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:ca9f686517ec2c4a4ce930207f75c00bf03d94e5063cbc00a1dc42531511b7eb"}, 1050 | {file = "cryptography-36.0.2-cp36-abi3-win32.whl", hash = "sha256:f64b232348ee82f13aac22856515ce0195837f6968aeaa94a3d0353ea2ec06a6"}, 1051 | {file = "cryptography-36.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:53e0285b49fd0ab6e604f4c5d9c5ddd98de77018542e88366923f152dbeb3c29"}, 1052 | {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:32db5cc49c73f39aac27574522cecd0a4bb7384e71198bc65a0d23f901e89bb7"}, 1053 | {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b3d199647468d410994dbeb8cec5816fb74feb9368aedf300af709ef507e3e"}, 1054 | {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:da73d095f8590ad437cd5e9faf6628a218aa7c387e1fdf67b888b47ba56a17f0"}, 1055 | {file = "cryptography-36.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0a3bf09bb0b7a2c93ce7b98cb107e9170a90c51a0162a20af1c61c765b90e60b"}, 1056 | {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8897b7b7ec077c819187a123174b645eb680c13df68354ed99f9b40a50898f77"}, 1057 | {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82740818f2f240a5da8dfb8943b360e4f24022b093207160c77cadade47d7c85"}, 1058 | {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1f64a62b3b75e4005df19d3b5235abd43fa6358d5516cfc43d87aeba8d08dd51"}, 1059 | {file = "cryptography-36.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e167b6b710c7f7bc54e67ef593f8731e1f45aa35f8a8a7b72d6e42ec76afd4b3"}, 1060 | {file = "cryptography-36.0.2.tar.gz", hash = "sha256:70f8f4f7bb2ac9f340655cbac89d68c527af5bb4387522a8413e841e3e6628c9"}, 1061 | ] 1062 | darglint = [ 1063 | {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, 1064 | {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, 1065 | ] 1066 | dill = [ 1067 | {file = "dill-0.3.4-py2.py3-none-any.whl", hash = "sha256:7e40e4a70304fd9ceab3535d36e58791d9c4a776b38ec7f7ec9afc8d3dca4d4f"}, 1068 | {file = "dill-0.3.4.zip", hash = "sha256:9f9734205146b2b353ab3fec9af0070237b6ddae78452af83d2fca84d739e675"}, 1069 | ] 1070 | docopt = [ 1071 | {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, 1072 | ] 1073 | docutils = [ 1074 | {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, 1075 | {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, 1076 | ] 1077 | execnet = [ 1078 | {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, 1079 | {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, 1080 | ] 1081 | flake8 = [ 1082 | {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, 1083 | {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, 1084 | ] 1085 | flake8-comprehensions = [ 1086 | {file = "flake8-comprehensions-3.8.0.tar.gz", hash = 
"sha256:8e108707637b1d13734f38e03435984f6b7854fa6b5a4e34f93e69534be8e521"}, 1087 | {file = "flake8_comprehensions-3.8.0-py3-none-any.whl", hash = "sha256:9406314803abe1193c064544ab14fdc43c58424c0882f6ff8a581eb73fc9bb58"}, 1088 | ] 1089 | gitdb = [ 1090 | {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, 1091 | {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, 1092 | ] 1093 | gitpython = [ 1094 | {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, 1095 | {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, 1096 | ] 1097 | idna = [ 1098 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, 1099 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, 1100 | ] 1101 | importlib-metadata = [ 1102 | {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, 1103 | {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, 1104 | ] 1105 | iniconfig = [ 1106 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 1107 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 1108 | ] 1109 | isort = [ 1110 | {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, 1111 | {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, 1112 | ] 1113 | jeepney = [ 1114 | {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, 1115 | {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, 1116 | ] 1117 | keyring = [ 1118 | {file = "keyring-23.5.0-py3-none-any.whl", hash = "sha256:b0d28928ac3ec8e42ef4cc227822647a19f1d544f21f96457965dc01cf555261"}, 1119 | {file = "keyring-23.5.0.tar.gz", hash = "sha256:9012508e141a80bd1c0b6778d5c610dd9f8c464d75ac6774248500503f972fb9"}, 1120 | ] 1121 | lazy-object-proxy = [ 1122 | {file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, 1123 | {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, 1124 | {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"}, 1125 | {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"}, 1126 | {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"}, 1127 | {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"}, 1128 | {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"}, 1129 | {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"}, 1130 | {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"}, 1131 | {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"}, 1132 | {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"}, 1133 | {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"}, 1134 | {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"}, 1135 | {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"}, 1136 | {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"}, 1137 | {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"}, 1138 | {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"}, 1139 | {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"}, 1140 | {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"}, 1141 | {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"}, 1142 | {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"}, 1143 | {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"}, 1144 | {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"}, 1145 | {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"}, 1146 | {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"}, 1147 | {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"}, 1148 | {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"}, 1149 | {file = "lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"}, 1150 | {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"}, 1151 | {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"}, 1152 | {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"}, 1153 | {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"}, 1154 | {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"}, 1155 | {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"}, 1156 | {file = "lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"}, 1157 | {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, 1158 | {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, 1159 | ] 1160 | mccabe = [ 1161 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 1162 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 1163 | ] 1164 | mypy = [ 1165 | {file = "mypy-0.942-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5bf44840fb43ac4074636fd47ee476d73f0039f4f54e86d7265077dc199be24d"}, 1166 | {file = "mypy-0.942-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dcd955f36e0180258a96f880348fbca54ce092b40fbb4b37372ae3b25a0b0a46"}, 1167 | {file = "mypy-0.942-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6776e5fa22381cc761df53e7496a805801c1a751b27b99a9ff2f0ca848c7eca0"}, 1168 | {file = "mypy-0.942-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:edf7237137a1a9330046dbb14796963d734dd740a98d5e144a3eb1d267f5f9ee"}, 1169 | {file = "mypy-0.942-cp310-cp310-win_amd64.whl", hash = "sha256:64235137edc16bee6f095aba73be5334677d6f6bdb7fa03cfab90164fa294a17"}, 1170 | {file = "mypy-0.942-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b840cfe89c4ab6386c40300689cd8645fc8d2d5f20101c7f8bd23d15fca14904"}, 1171 | {file = "mypy-0.942-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b184db8c618c43c3a31b32ff00cd28195d39e9c24e7c3b401f3db7f6e5767f5"}, 1172 | {file = "mypy-0.942-cp36-cp36m-win_amd64.whl", hash = "sha256:1a0459c333f00e6a11cbf6b468b870c2b99a906cb72d6eadf3d1d95d38c9352c"}, 1173 | {file = "mypy-0.942-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c3e497588afccfa4334a9986b56f703e75793133c4be3a02d06a3df16b67a58"}, 1174 | {file = 
"mypy-0.942-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f6ad963172152e112b87cc7ec103ba0f2db2f1cd8997237827c052a3903eaa6"}, 1175 | {file = "mypy-0.942-cp37-cp37m-win_amd64.whl", hash = "sha256:0e2dd88410937423fba18e57147dd07cd8381291b93d5b1984626f173a26543e"}, 1176 | {file = "mypy-0.942-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:246e1aa127d5b78488a4a0594bd95f6d6fb9d63cf08a66dafbff8595d8891f67"}, 1177 | {file = "mypy-0.942-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d8d3ba77e56b84cd47a8ee45b62c84b6d80d32383928fe2548c9a124ea0a725c"}, 1178 | {file = "mypy-0.942-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2bc249409a7168d37c658e062e1ab5173300984a2dada2589638568ddc1db02b"}, 1179 | {file = "mypy-0.942-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9521c1265ccaaa1791d2c13582f06facf815f426cd8b07c3a485f486a8ffc1f3"}, 1180 | {file = "mypy-0.942-cp38-cp38-win_amd64.whl", hash = "sha256:e865fec858d75b78b4d63266c9aff770ecb6a39dfb6d6b56c47f7f8aba6baba8"}, 1181 | {file = "mypy-0.942-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ce34a118d1a898f47def970a2042b8af6bdcc01546454726c7dd2171aa6dfca"}, 1182 | {file = "mypy-0.942-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:10daab80bc40f84e3f087d896cdb53dc811a9f04eae4b3f95779c26edee89d16"}, 1183 | {file = "mypy-0.942-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3841b5433ff936bff2f4dc8d54cf2cdbfea5d8e88cedfac45c161368e5770ba6"}, 1184 | {file = "mypy-0.942-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f7106cbf9cc2f403693bf50ed7c9fa5bb3dfa9007b240db3c910929abe2a322"}, 1185 | {file = "mypy-0.942-cp39-cp39-win_amd64.whl", hash = "sha256:7742d2c4e46bb5017b51c810283a6a389296cda03df805a4f7869a6f41246534"}, 1186 | {file = "mypy-0.942-py3-none-any.whl", hash = "sha256:a1b383fe99678d7402754fe90448d4037f9512ce70c21f8aee3b8bf48ffc51db"}, 1187 | {file = "mypy-0.942.tar.gz", hash = "sha256:17e44649fec92e9f82102b48a3bf7b4a5510ad0cd22fa21a104826b5db4903e2"}, 1188 | ] 1189 | mypy-extensions = [ 1190 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 1191 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 1192 | ] 1193 | packaging = [ 1194 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 1195 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 1196 | ] 1197 | pathspec = [ 1198 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 1199 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 1200 | ] 1201 | pbr = [ 1202 | {file = "pbr-5.8.1-py2.py3-none-any.whl", hash = "sha256:27108648368782d07bbf1cb468ad2e2eeef29086affd14087a6d04b7de8af4ec"}, 1203 | {file = "pbr-5.8.1.tar.gz", hash = "sha256:66bc5a34912f408bb3925bf21231cb6f59206267b7f63f3503ef865c1a292e25"}, 1204 | ] 1205 | pkginfo = [ 1206 | {file = "pkginfo-1.8.2-py2.py3-none-any.whl", hash = "sha256:c24c487c6a7f72c66e816ab1796b96ac6c3d14d49338293d2141664330b55ffc"}, 1207 | {file = "pkginfo-1.8.2.tar.gz", hash = 
"sha256:542e0d0b6750e2e21c20179803e40ab50598d8066d51097a0e382cba9eb02bff"}, 1208 | ] 1209 | platformdirs = [ 1210 | {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, 1211 | {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, 1212 | ] 1213 | pluggy = [ 1214 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 1215 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 1216 | ] 1217 | py = [ 1218 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 1219 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 1220 | ] 1221 | pycodestyle = [ 1222 | {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, 1223 | {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, 1224 | ] 1225 | pycparser = [ 1226 | {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, 1227 | {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, 1228 | ] 1229 | pyflakes = [ 1230 | {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, 1231 | {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, 1232 | ] 1233 | pygments = [ 1234 | {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, 1235 | {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, 1236 | ] 1237 | pylint = [ 1238 | {file = "pylint-2.13.5-py3-none-any.whl", hash = "sha256:c149694cfdeaee1aa2465e6eaab84c87a881a7d55e6e93e09466be7164764d1e"}, 1239 | {file = "pylint-2.13.5.tar.gz", hash = "sha256:dab221658368c7a05242e673c275c488670144123f4bd262b2777249c1c0de9b"}, 1240 | ] 1241 | pyodbc = [ 1242 | {file = "pyodbc-4.0.32-cp27-cp27m-win32.whl", hash = "sha256:2152ce6d5131d769ff5839aa762e12d844c95e9ec4bb2f666e8cd9dfa1ae2240"}, 1243 | {file = "pyodbc-4.0.32-cp27-cp27m-win_amd64.whl", hash = "sha256:56ec4974096d40d6c62a228799122dbc2ade6c4045cc5d31860212a32cae95b1"}, 1244 | {file = "pyodbc-4.0.32-cp36-cp36m-win32.whl", hash = "sha256:699c080b1c1f7b4afc368b3521fd1161f46a10223443692a249cb01d90949b31"}, 1245 | {file = "pyodbc-4.0.32-cp36-cp36m-win_amd64.whl", hash = "sha256:0d4e14adb149cae45da37fa87aa297055156dae6e89ca3c75493d3d62d78e543"}, 1246 | {file = "pyodbc-4.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6c1e1c1fe747b0f6419e8df0b5c43161e7437dbf72f93f9fcfb9b7358fad3e12"}, 1247 | {file = "pyodbc-4.0.32-cp37-cp37m-win32.whl", hash = "sha256:bbc07517f339e019ee9f1fe679c4241251d11ca2124567616f67d62e73c29fc0"}, 1248 | {file = "pyodbc-4.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:e81ebf9cab80a6eaba7922dea02036e9f8a507a7b818856b8008a02d6fc0d2ab"}, 1249 | {file = "pyodbc-4.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0e4178e9b93329bbba17555882008e36a114179d06033b813a13b254dcd755d0"}, 1250 | {file = 
"pyodbc-4.0.32-cp38-cp38-win32.whl", hash = "sha256:c066f032e69fd71e9fadb3a380dfe8ecd1728b40a2bf38f76054d284f8523b29"}, 1251 | {file = "pyodbc-4.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:736acad1b264ddb7313058dfe37265b0c5160c1c2a9d1ffd391347c025eb5dd1"}, 1252 | {file = "pyodbc-4.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:339d8aa633b0c65be5149c3378c7e3b5bead94dc8bb023a715b416bd047a008e"}, 1253 | {file = "pyodbc-4.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:cda790bdc25bfad12d4fb9ba93368275802f7f9ecfa4c9c65e982d3a7fc35f2e"}, 1254 | {file = "pyodbc-4.0.32.tar.gz", hash = "sha256:9be5f0c3590655e1968488410fe3528bb8023d527e7ccec1f663d64245071a6b"}, 1255 | ] 1256 | pyparsing = [ 1257 | {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"}, 1258 | {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"}, 1259 | ] 1260 | pyproject-flake8 = [ 1261 | {file = "pyproject-flake8-0.0.1a4.tar.gz", hash = "sha256:8ed9453f1d984cfe94c998f9840275359e29e7f435b8ddd188ae084e2dc1270c"}, 1262 | {file = "pyproject_flake8-0.0.1a4-py2.py3-none-any.whl", hash = "sha256:1a8f94e18d08677ee780625049d9d00a9ee823661c6606caab8a383351037a75"}, 1263 | ] 1264 | pytest = [ 1265 | {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"}, 1266 | {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"}, 1267 | ] 1268 | pytest-asyncio = [ 1269 | {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"}, 1270 | {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"}, 1271 | {file = "pytest_asyncio-0.18.3-py3-none-any.whl", hash = "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84"}, 1272 | ] 1273 | pytest-cov = [ 1274 | {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, 1275 | {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, 1276 | ] 1277 | pytest-forked = [ 1278 | {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, 1279 | {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, 1280 | ] 1281 | pytest-xdist = [ 1282 | {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, 1283 | {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, 1284 | ] 1285 | pywin32-ctypes = [ 1286 | {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, 1287 | {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, 1288 | ] 1289 | pyyaml = [ 1290 | {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, 1291 | {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, 1292 | {file = 
"PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, 1293 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, 1294 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, 1295 | {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, 1296 | {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, 1297 | {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, 1298 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, 1299 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, 1300 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, 1301 | {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, 1302 | {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, 1303 | {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, 1304 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, 1305 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, 1306 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, 1307 | {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, 1308 | {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, 1309 | {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, 1310 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, 1311 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, 1312 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, 1313 | {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = 
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, 1314 | {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, 1315 | {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, 1316 | {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, 1317 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, 1318 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, 1319 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, 1320 | {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, 1321 | {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, 1322 | {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, 1323 | ] 1324 | readme-renderer = [ 1325 | {file = "readme_renderer-34.0-py3-none-any.whl", hash = "sha256:262510fe6aae81ed4e94d8b169077f325614c0b1a45916a80442c6576264a9c2"}, 1326 | {file = "readme_renderer-34.0.tar.gz", hash = "sha256:dfb4d17f21706d145f7473e0b61ca245ba58e810cf9b2209a48239677f82e5b0"}, 1327 | ] 1328 | requests = [ 1329 | {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, 1330 | {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, 1331 | ] 1332 | requests-toolbelt = [ 1333 | {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, 1334 | {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, 1335 | ] 1336 | rfc3986 = [ 1337 | {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, 1338 | {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, 1339 | ] 1340 | rich = [ 1341 | {file = "rich-12.2.0-py3-none-any.whl", hash = "sha256:c50f3d253bc6a9bb9c79d61a26d510d74abdf1b16881260fab5edfc3edfb082f"}, 1342 | {file = "rich-12.2.0.tar.gz", hash = "sha256:ea74bc9dad9589d8eea3e3fd0b136d8bf6e428888955f215824c2894f0da8b47"}, 1343 | ] 1344 | secretstorage = [ 1345 | {file = "SecretStorage-3.3.2-py3-none-any.whl", hash = "sha256:755dc845b6ad76dcbcbc07ea3da75ae54bb1ea529eb72d15f83d26499a5df319"}, 1346 | {file = "SecretStorage-3.3.2.tar.gz", hash = "sha256:0a8eb9645b320881c222e827c26f4cfcf55363e8b374a021981ef886657a912f"}, 1347 | ] 1348 | six = [ 1349 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 1350 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 1351 | ] 1352 | smmap = [ 1353 | {file = "smmap-5.0.0-py3-none-any.whl", 
hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, 1354 | {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, 1355 | ] 1356 | stevedore = [ 1357 | {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, 1358 | {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, 1359 | ] 1360 | tomli = [ 1361 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 1362 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 1363 | ] 1364 | twine = [ 1365 | {file = "twine-4.0.0-py3-none-any.whl", hash = "sha256:6f7496cf14a3a8903474552d5271c79c71916519edb42554f23f42a8563498a9"}, 1366 | {file = "twine-4.0.0.tar.gz", hash = "sha256:817aa0c0bdc02a5ebe32051e168e23c71a0608334e624c793011f120dbbc05b7"}, 1367 | ] 1368 | typed-ast = [ 1369 | {file = "typed_ast-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ad3b48cf2b487be140072fb86feff36801487d4abb7382bb1929aaac80638ea"}, 1370 | {file = "typed_ast-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:542cd732351ba8235f20faa0fc7398946fe1a57f2cdb289e5497e1e7f48cfedb"}, 1371 | {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc2c11ae59003d4a26dda637222d9ae924387f96acae9492df663843aefad55"}, 1372 | {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd5df1313915dbd70eaaa88c19030b441742e8b05e6103c631c83b75e0435ccc"}, 1373 | {file = "typed_ast-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:e34f9b9e61333ecb0f7d79c21c28aa5cd63bec15cb7e1310d7d3da6ce886bc9b"}, 1374 | {file = "typed_ast-1.5.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f818c5b81966d4728fec14caa338e30a70dfc3da577984d38f97816c4b3071ec"}, 1375 | {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3042bfc9ca118712c9809201f55355479cfcdc17449f9f8db5e744e9625c6805"}, 1376 | {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fff9fdcce59dc61ec1b317bdb319f8f4e6b69ebbe61193ae0a60c5f9333dc49"}, 1377 | {file = "typed_ast-1.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8e0b8528838ffd426fea8d18bde4c73bcb4167218998cc8b9ee0a0f2bfe678a6"}, 1378 | {file = "typed_ast-1.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ef1d96ad05a291f5c36895d86d1375c0ee70595b90f6bb5f5fdbee749b146db"}, 1379 | {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed44e81517364cb5ba367e4f68fca01fba42a7a4690d40c07886586ac267d9b9"}, 1380 | {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f60d9de0d087454c91b3999a296d0c4558c1666771e3460621875021bf899af9"}, 1381 | {file = "typed_ast-1.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9e237e74fd321a55c90eee9bc5d44be976979ad38a29bbd734148295c1ce7617"}, 1382 | {file = "typed_ast-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee852185964744987609b40aee1d2eb81502ae63ee8eef614558f96a56c1902d"}, 1383 | {file = "typed_ast-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:27e46cdd01d6c3a0dd8f728b6a938a6751f7bd324817501c15fb056307f918c6"}, 1384 | {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d64dabc6336ddc10373922a146fa2256043b3b43e61f28961caec2a5207c56d5"}, 1385 | {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8cdf91b0c466a6c43f36c1964772918a2c04cfa83df8001ff32a89e357f8eb06"}, 1386 | {file = "typed_ast-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:9cc9e1457e1feb06b075c8ef8aeb046a28ec351b1958b42c7c31c989c841403a"}, 1387 | {file = "typed_ast-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e20d196815eeffb3d76b75223e8ffed124e65ee62097e4e73afb5fec6b993e7a"}, 1388 | {file = "typed_ast-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:37e5349d1d5de2f4763d534ccb26809d1c24b180a477659a12c4bde9dd677d74"}, 1389 | {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f1a27592fac87daa4e3f16538713d705599b0a27dfe25518b80b6b017f0a6d"}, 1390 | {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8831479695eadc8b5ffed06fdfb3e424adc37962a75925668deeb503f446c0a3"}, 1391 | {file = "typed_ast-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:20d5118e494478ef2d3a2702d964dae830aedd7b4d3b626d003eea526be18718"}, 1392 | {file = "typed_ast-1.5.3.tar.gz", hash = "sha256:27f25232e2dd0edfe1f019d6bfaaf11e86e657d9bdb7b0956db95f560cceb2b3"}, 1393 | ] 1394 | typing-extensions = [ 1395 | {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, 1396 | {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, 1397 | ] 1398 | urllib3 = [ 1399 | {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, 1400 | {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, 1401 | ] 1402 | webencodings = [ 1403 | {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, 1404 | {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, 1405 | ] 1406 | wrapt = [ 1407 | {file = "wrapt-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:5a9a1889cc01ed2ed5f34574c90745fab1dd06ec2eee663e8ebeefe363e8efd7"}, 1408 | {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:9a3ff5fb015f6feb78340143584d9f8a0b91b6293d6b5cf4295b3e95d179b88c"}, 1409 | {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4b847029e2d5e11fd536c9ac3136ddc3f54bc9488a75ef7d040a3900406a91eb"}, 1410 | {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9a5a544861b21e0e7575b6023adebe7a8c6321127bb1d238eb40d99803a0e8bd"}, 1411 | {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:88236b90dda77f0394f878324cfbae05ae6fde8a84d548cfe73a75278d760291"}, 1412 | {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f0408e2dbad9e82b4c960274214af533f856a199c9274bd4aff55d4634dedc33"}, 1413 | {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9d8c68c4145041b4eeae96239802cfdfd9ef927754a5be3f50505f09f309d8c6"}, 1414 | {file = 
"wrapt-1.14.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:22626dca56fd7f55a0733e604f1027277eb0f4f3d95ff28f15d27ac25a45f71b"}, 1415 | {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:65bf3eb34721bf18b5a021a1ad7aa05947a1767d1aa272b725728014475ea7d5"}, 1416 | {file = "wrapt-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09d16ae7a13cff43660155383a2372b4aa09109c7127aa3f24c3cf99b891c330"}, 1417 | {file = "wrapt-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:debaf04f813ada978d7d16c7dfa16f3c9c2ec9adf4656efdc4defdf841fc2f0c"}, 1418 | {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748df39ed634851350efa87690c2237a678ed794fe9ede3f0d79f071ee042561"}, 1419 | {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1807054aa7b61ad8d8103b3b30c9764de2e9d0c0978e9d3fc337e4e74bf25faa"}, 1420 | {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763a73ab377390e2af26042f685a26787c402390f682443727b847e9496e4a2a"}, 1421 | {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8529b07b49b2d89d6917cfa157d3ea1dfb4d319d51e23030664a827fe5fd2131"}, 1422 | {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68aeefac31c1f73949662ba8affaf9950b9938b712fb9d428fa2a07e40ee57f8"}, 1423 | {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59d7d92cee84a547d91267f0fea381c363121d70fe90b12cd88241bd9b0e1763"}, 1424 | {file = "wrapt-1.14.0-cp310-cp310-win32.whl", hash = "sha256:3a88254881e8a8c4784ecc9cb2249ff757fd94b911d5df9a5984961b96113fff"}, 1425 | {file = "wrapt-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a242871b3d8eecc56d350e5e03ea1854de47b17f040446da0e47dc3e0b9ad4d"}, 1426 | {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a65bffd24409454b889af33b6c49d0d9bcd1a219b972fba975ac935f17bdf627"}, 1427 | {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9d9fcd06c952efa4b6b95f3d788a819b7f33d11bea377be6b8980c95e7d10775"}, 1428 | {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:db6a0ddc1282ceb9032e41853e659c9b638789be38e5b8ad7498caac00231c23"}, 1429 | {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:14e7e2c5f5fca67e9a6d5f753d21f138398cad2b1159913ec9e9a67745f09ba3"}, 1430 | {file = "wrapt-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:6d9810d4f697d58fd66039ab959e6d37e63ab377008ef1d63904df25956c7db0"}, 1431 | {file = "wrapt-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d808a5a5411982a09fef6b49aac62986274ab050e9d3e9817ad65b2791ed1425"}, 1432 | {file = "wrapt-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b77159d9862374da213f741af0c361720200ab7ad21b9f12556e0eb95912cd48"}, 1433 | {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a76a7527df8583112b24adc01748cd51a2d14e905b337a6fefa8b96fc708fb"}, 1434 | {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0057b5435a65b933cbf5d859cd4956624df37b8bf0917c71756e4b3d9958b9e"}, 1435 | {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0a4ca02752ced5f37498827e49c414d694ad7cf451ee850e3ff160f2bee9d3"}, 1436 | {file = 
"wrapt-1.14.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8c6be72eac3c14baa473620e04f74186c5d8f45d80f8f2b4eda6e1d18af808e8"}, 1437 | {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:21b1106bff6ece8cb203ef45b4f5778d7226c941c83aaaa1e1f0f4f32cc148cd"}, 1438 | {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:493da1f8b1bb8a623c16552fb4a1e164c0200447eb83d3f68b44315ead3f9036"}, 1439 | {file = "wrapt-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:89ba3d548ee1e6291a20f3c7380c92f71e358ce8b9e48161401e087e0bc740f8"}, 1440 | {file = "wrapt-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:729d5e96566f44fccac6c4447ec2332636b4fe273f03da128fff8d5559782b06"}, 1441 | {file = "wrapt-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:891c353e95bb11abb548ca95c8b98050f3620a7378332eb90d6acdef35b401d4"}, 1442 | {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f96134a3aa24cc50614920cc087e22f87439053d886e474638c68c8d15dc80"}, 1443 | {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6807bcee549a8cb2f38f73f469703a1d8d5d990815c3004f21ddb68a567385ce"}, 1444 | {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6915682f9a9bc4cf2908e83caf5895a685da1fbd20b6d485dafb8e218a338279"}, 1445 | {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2f3bc7cd9c9fcd39143f11342eb5963317bd54ecc98e3650ca22704b69d9653"}, 1446 | {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3a71dbd792cc7a3d772ef8cd08d3048593f13d6f40a11f3427c000cf0a5b36a0"}, 1447 | {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a0898a640559dec00f3614ffb11d97a2666ee9a2a6bad1259c9facd01a1d4d9"}, 1448 | {file = "wrapt-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:167e4793dc987f77fd476862d32fa404d42b71f6a85d3b38cbce711dba5e6b68"}, 1449 | {file = "wrapt-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d066ffc5ed0be00cd0352c95800a519cf9e4b5dd34a028d301bdc7177c72daf3"}, 1450 | {file = "wrapt-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9bdfa74d369256e4218000a629978590fd7cb6cf6893251dad13d051090436d"}, 1451 | {file = "wrapt-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2498762814dd7dd2a1d0248eda2afbc3dd9c11537bc8200a4b21789b6df6cd38"}, 1452 | {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f24ca7953f2643d59a9c87d6e272d8adddd4a53bb62b9208f36db408d7aafc7"}, 1453 | {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b835b86bd5a1bdbe257d610eecab07bf685b1af2a7563093e0e69180c1d4af1"}, 1454 | {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21650fa6907e523869e0396c5bd591cc326e5c1dd594dcdccac089561cacfb8"}, 1455 | {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:354d9fc6b1e44750e2a67b4b108841f5f5ea08853453ecbf44c81fdc2e0d50bd"}, 1456 | {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f83e9c21cd5275991076b2ba1cd35418af3504667affb4745b48937e214bafe"}, 1457 | {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61e1a064906ccba038aa3c4a5a82f6199749efbbb3cef0804ae5c37f550eded0"}, 1458 | {file = "wrapt-1.14.0-cp38-cp38-win32.whl", hash = 
"sha256:28c659878f684365d53cf59dc9a1929ea2eecd7ac65da762be8b1ba193f7e84f"}, 1459 | {file = "wrapt-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:b0ed6ad6c9640671689c2dbe6244680fe8b897c08fd1fab2228429b66c518e5e"}, 1460 | {file = "wrapt-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3f7e671fb19734c872566e57ce7fc235fa953d7c181bb4ef138e17d607dc8a1"}, 1461 | {file = "wrapt-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87fa943e8bbe40c8c1ba4086971a6fefbf75e9991217c55ed1bcb2f1985bd3d4"}, 1462 | {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4775a574e9d84e0212f5b18886cace049a42e13e12009bb0491562a48bb2b758"}, 1463 | {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d57677238a0c5411c76097b8b93bdebb02eb845814c90f0b01727527a179e4d"}, 1464 | {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00108411e0f34c52ce16f81f1d308a571df7784932cc7491d1e94be2ee93374b"}, 1465 | {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d332eecf307fca852d02b63f35a7872de32d5ba8b4ec32da82f45df986b39ff6"}, 1466 | {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f799def9b96a8ec1ef6b9c1bbaf2bbc859b87545efbecc4a78faea13d0e3a0"}, 1467 | {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47045ed35481e857918ae78b54891fac0c1d197f22c95778e66302668309336c"}, 1468 | {file = "wrapt-1.14.0-cp39-cp39-win32.whl", hash = "sha256:2eca15d6b947cfff51ed76b2d60fd172c6ecd418ddab1c5126032d27f74bc350"}, 1469 | {file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"}, 1470 | {file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"}, 1471 | ] 1472 | zipp = [ 1473 | {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, 1474 | {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, 1475 | ] 1476 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "asyncodbc" 3 | version = "0.1.1" 4 | description = "Forked from aioodbc and make improvement" 5 | authors = ["long2ice "] 6 | license = "Apache-2.0" 7 | readme = "README.rst" 8 | homepage = "https://github.com/tortoise/asyncodbc" 9 | repository = "https://github.com/tortoise/asyncodbc.git" 10 | documentation = "https://github.com/tortoise/asyncodbc" 11 | keywords = ["sql", "async", "asyncio", "aio", "mssql", "odbc"] 12 | packages = [ 13 | { include = "asyncodbc" } 14 | ] 15 | include = ["LICENSE", "README.rst"] 16 | 17 | [tool.poetry.dependencies] 18 | python = "^3.7" 19 | pyodbc = "*" 20 | 21 | [tool.poetry.dev-dependencies] 22 | # Linter tools 23 | mypy = "*" 24 | flake8 = "*" 25 | flake8-comprehensions = "*" 26 | pyproject-flake8 = "*" 27 | darglint = "*" 28 | pylint = "*" 29 | pygments = "*" 30 | bandit = "*" 31 | black = "*" 32 | # Test tools 33 | coveralls = "*" 34 | pytest = "*" 35 | pytest-xdist = "*" 36 | pytest-cov = "*" 37 | pytest-asyncio = "*" 38 | # Pypi 39 | twine = "*" 40 | 41 | [build-system] 42 | requires = ["poetry-core>=1.0.0"] 43 | build-backend = "poetry.core.masonry.api" 44 | 45 | 
[tool.pytest.ini_options] 46 | asyncio_mode = "auto" 47 | 48 | [tool.flake8] 49 | ignore = "E501,W503,DAR101,DAR201,DAR402" 50 | max-line-length = 100 51 | docstring_style = "sphinx" 52 | 53 | [tool.coverage.run] 54 | branch = true 55 | source = ["asyncodbc"] 56 | 57 | [tool.coverage.report] 58 | show_missing = true 59 | 60 | [tool.mypy] 61 | pretty = true 62 | ignore_missing_imports = true 63 | check_untyped_defs = true 64 | disallow_subclassing_any = true 65 | disallow_untyped_calls = true 66 | disallow_untyped_defs = false 67 | disallow_incomplete_defs = false 68 | disallow_untyped_decorators = true 69 | no_implicit_optional = true 70 | warn_redundant_casts = true 71 | warn_unused_ignores = true 72 | warn_no_return = true 73 | warn_return_any = false 74 | warn_unused_configs = true 75 | warn_unreachable = true 76 | allow_redefinition = true 77 | strict_equality = true 78 | show_error_context = true 79 | -------------------------------------------------------------------------------- /tests/test_connection.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import gc 3 | from unittest import mock 4 | 5 | import pyodbc 6 | import pytest 7 | 8 | import asyncodbc 9 | 10 | 11 | def test_connect(conn): 12 | assert not conn.autocommit 13 | assert conn.timeout == 0 14 | assert not conn.closed 15 | 16 | 17 | @pytest.mark.asyncio 18 | async def test_connect_hook(connection_maker): 19 | raw_conn = None 20 | 21 | async def hook(conn): 22 | nonlocal raw_conn 23 | raw_conn = conn 24 | 25 | connection = await connection_maker(after_created=hook) 26 | assert connection._conn == raw_conn 27 | 28 | 29 | @pytest.mark.asyncio 30 | async def test_basic_cursor(conn): 31 | cursor = await conn.cursor() 32 | sql = "SELECT 10;" 33 | await cursor.execute(sql) 34 | (resp,) = await cursor.fetchone() 35 | assert resp == 10 36 | 37 | 38 | @pytest.mark.asyncio 39 | async def test_default_loop(dsn): 40 | conn = await asyncodbc.connect(dsn=dsn) 41 | assert conn._loop is asyncio.get_running_loop() 42 | await conn.close() 43 | 44 | 45 | @pytest.mark.asyncio 46 | async def test_close_twice(conn): 47 | await conn.close() 48 | await conn.close() 49 | assert conn.closed 50 | 51 | 52 | @pytest.mark.asyncio 53 | async def test_execute(conn): 54 | cur = await conn.execute("SELECT 10;") 55 | (resp,) = await cur.fetchone() 56 | await conn.close() 57 | assert resp == 10 58 | assert conn.closed 59 | 60 | 61 | @pytest.mark.asyncio 62 | async def test_output_conversion(conn, table): 63 | def convert(value): 64 | # value will be a string. We'll simply add an X at the 65 | # beginning at the end. 66 | if isinstance(value, str): 67 | return "X" + value + "X" 68 | return b"X" + value + b"X" 69 | 70 | await conn.add_output_converter(pyodbc.SQL_VARCHAR, convert) 71 | cur = await conn.cursor() 72 | 73 | await cur.execute("INSERT INTO t1 VALUES (3, '123.45')") 74 | await cur.execute("SELECT v FROM t1 WHERE n=3;") 75 | (value,) = await cur.fetchone() 76 | 77 | assert value in (b"X123.45X", "X123.45X") 78 | 79 | # Now clear the conversions and try again. There should be 80 | # no Xs this time. 
81 | await conn.clear_output_converters() 82 | await cur.execute("SELECT v FROM t1") 83 | (value,) = await cur.fetchone() 84 | assert value == "123.45" 85 | await cur.close() 86 | 87 | 88 | @pytest.mark.asyncio 89 | async def test_autocommit(connection_maker): 90 | conn = await connection_maker(autocommit=True) 91 | assert conn.autocommit, True 92 | 93 | 94 | @pytest.mark.asyncio 95 | async def test_rollback(conn): 96 | assert not conn.autocommit 97 | 98 | cur = await conn.cursor() 99 | await cur.execute("CREATE TABLE t1(n INT, v VARCHAR(10));") 100 | 101 | await conn.commit() 102 | 103 | await cur.execute("INSERT INTO t1 VALUES (1, '123.45');") 104 | await cur.execute("SELECT v FROM t1") 105 | (value,) = await cur.fetchone() 106 | assert value == "123.45" 107 | 108 | await conn.rollback() 109 | await cur.execute("SELECT v FROM t1;") 110 | value = await cur.fetchone() 111 | assert value is None 112 | await cur.execute("DROP TABLE t1;") 113 | await conn.commit() 114 | 115 | await conn.close() 116 | 117 | 118 | @pytest.mark.asyncio 119 | async def test_custom_executor(dsn, executor): 120 | conn = await asyncodbc.connect( 121 | dsn=dsn, 122 | executor=executor, 123 | ) 124 | assert conn._executor is executor 125 | cur = await conn.execute("SELECT 10;") 126 | (resp,) = await cur.fetchone() 127 | await conn.close() 128 | assert resp == 10 129 | assert conn.closed 130 | 131 | 132 | @pytest.mark.asyncio 133 | async def test_data_sources(executor): 134 | data = await asyncodbc.data_sources(executor) 135 | assert isinstance(data, dict) 136 | 137 | 138 | @pytest.mark.asyncio 139 | async def test_connection_simple_with(conn): 140 | assert not conn.closed 141 | async with conn: 142 | pass 143 | 144 | assert conn.closed 145 | 146 | 147 | @pytest.mark.asyncio 148 | async def test_connect_context_manager(dsn): 149 | async with asyncodbc.connect(dsn=dsn, echo=True) as conn: 150 | assert not conn.closed 151 | assert conn.echo 152 | 153 | cur = await conn.execute("SELECT 10;") 154 | assert cur.echo 155 | (resp,) = await cur.fetchone() 156 | assert resp == 10 157 | await cur.close() 158 | 159 | assert conn.closed 160 | 161 | 162 | @pytest.mark.asyncio 163 | async def test___del__(dsn, recwarn, executor): 164 | conn = await asyncodbc.connect(dsn=dsn, executor=executor) 165 | exc_handler = mock.Mock() 166 | loop = conn._loop 167 | loop.set_exception_handler(exc_handler) 168 | 169 | del conn 170 | gc.collect() 171 | w = recwarn.pop() 172 | assert issubclass(w.category, ResourceWarning) 173 | 174 | msg = {"connection": mock.ANY, "message": "Unclosed connection"} # conn was deleted 175 | if loop.get_debug(): 176 | msg["source_traceback"] = mock.ANY 177 | exc_handler.assert_called_with(loop, msg) 178 | -------------------------------------------------------------------------------- /tests/test_cursor.py: -------------------------------------------------------------------------------- 1 | import pyodbc 2 | import pytest 3 | 4 | 5 | @pytest.mark.asyncio 6 | async def test_cursor_with(conn, table): 7 | ret = [] 8 | 9 | # regular cursor usage 10 | cur = await conn.cursor() 11 | await cur.execute("SELECT * FROM t1;") 12 | assert not cur.closed 13 | assert not cur.echo 14 | 15 | # cursor should be closed 16 | async with cur: 17 | assert not cur.echo 18 | async for i in cur: 19 | ret.append(i) 20 | expected = [tuple(r) for r in ret] 21 | assert [(1, "123.45"), (2, "foo")] == expected 22 | assert cur.closed 23 | 24 | 25 | @pytest.mark.asyncio 26 | async def test_cursor_lightweight(conn, table): 27 | cur = await 
conn.cursor() 28 | ex_cursor = await cur.execute("SELECT * FROM t1;") 29 | assert ex_cursor is cur 30 | 31 | assert not cur.closed 32 | async with cur: 33 | pass 34 | 35 | assert cur.closed 36 | 37 | 38 | @pytest.mark.asyncio 39 | async def test_cursor_await(conn, table): 40 | async with conn.cursor() as cur: 41 | await cur.execute("SELECT * FROM t1;") 42 | assert not cur.closed 43 | 44 | assert cur.closed 45 | 46 | 47 | @pytest.mark.asyncio 48 | async def test_cursor(conn): 49 | cur = await conn.cursor() 50 | assert cur.connection is conn 51 | assert cur._loop, conn.loop 52 | assert cur.arraysize == 1 53 | assert cur.rowcount == -1 54 | 55 | r = await cur.setinputsizes() 56 | assert r is None 57 | 58 | await cur.setoutputsize() 59 | assert r is None 60 | await cur.close() 61 | 62 | 63 | @pytest.mark.asyncio 64 | async def test_execute_on_closed_cursor(conn): 65 | cur = await conn.cursor() 66 | await cur.close() 67 | with pytest.raises(pyodbc.OperationalError): 68 | await cur.execute("SELECT 1;") 69 | 70 | 71 | @pytest.mark.asyncio 72 | async def test_close(conn): 73 | cur = await conn.cursor() 74 | assert not cur.closed 75 | await cur.close() 76 | await cur.close() 77 | assert cur.closed 78 | 79 | 80 | @pytest.mark.asyncio 81 | async def test_description(conn): 82 | cur = await conn.cursor() 83 | assert cur.description is None 84 | await cur.execute("SELECT 1;") 85 | expected = (("", int, None, 10, 10, 0, False),) 86 | assert cur.description == expected 87 | await cur.close() 88 | 89 | 90 | @pytest.mark.asyncio 91 | async def test_description_with_real_table(conn, table): 92 | cur = await conn.cursor() 93 | await cur.execute("SELECT * FROM t1;") 94 | 95 | expected = (("n", int, None, 10, 10, 0, True), ("v", str, None, 10, 10, 0, True)) 96 | assert cur.description == expected 97 | await cur.close() 98 | 99 | 100 | @pytest.mark.asyncio 101 | async def test_rowcount_with_table(conn, table): 102 | cur = await conn.cursor() 103 | await cur.execute("SELECT * FROM t1;") 104 | await cur.fetchall() 105 | assert cur.rowcount == -1 106 | await cur.close() 107 | 108 | 109 | @pytest.mark.asyncio 110 | async def test_arraysize(conn): 111 | cur = await conn.cursor() 112 | assert 1 == cur.arraysize 113 | cur.arraysize = 10 114 | assert 10 == cur.arraysize 115 | await cur.close() 116 | 117 | 118 | @pytest.mark.asyncio 119 | async def test_fetchall(conn, table): 120 | cur = await conn.cursor() 121 | await cur.execute("SELECT * FROM t1;") 122 | resp = await cur.fetchall() 123 | expected = [(1, "123.45"), (2, "foo")] 124 | 125 | for row, exp in zip(resp, expected): 126 | assert exp == tuple(row) 127 | 128 | await cur.close() 129 | 130 | 131 | @pytest.mark.asyncio 132 | async def test_fetchmany(conn, table): 133 | cur = await conn.cursor() 134 | await cur.execute("SELECT * FROM t1;") 135 | resp = await cur.fetchmany(1) 136 | expected = [(1, "123.45")] 137 | 138 | for row, exp in zip(resp, expected): 139 | assert exp == tuple(row) 140 | 141 | await cur.close() 142 | 143 | 144 | @pytest.mark.asyncio 145 | async def test_fetchone(conn, table): 146 | cur = await conn.cursor() 147 | await cur.execute("SELECT * FROM t1;") 148 | resp = await cur.fetchone() 149 | expected = (1, "123.45") 150 | 151 | assert expected == tuple(resp) 152 | await cur.close() 153 | 154 | 155 | @pytest.mark.asyncio 156 | async def test_cursor_rollback(conn, table): 157 | cur = await conn.cursor() 158 | await cur.execute("INSERT INTO t1 VALUES (3, '123.45');") 159 | await cur.execute("SELECT v FROM t1 WHERE n=3;") 160 | (value,) = await 
cur.fetchone() 161 | assert value == "123.45" 162 | 163 | await cur.rollback() 164 | await cur.execute("SELECT v FROM t1 WHERE n=3;") 165 | value = await cur.fetchone() 166 | assert value is None 167 | 168 | 169 | @pytest.mark.asyncio 170 | async def test_executemany(conn): 171 | cur = await conn.cursor() 172 | await cur.execute("CREATE TABLE t1(a int, b VARCHAR(10))") 173 | # TODO: figure out why it is possible to insert only strings... but not int 174 | params = [(str(i), str(i)) for i in range(1, 6)] 175 | await cur.executemany("INSERT INTO t1(a, b) VALUES (?, ?)", params) 176 | await cur.execute("SELECT COUNT(*) FROM t1") 177 | count = await cur.fetchone() 178 | assert count[0] == len(params) 179 | 180 | await cur.execute("SELECT a, b FROM t1 ORDER BY a") 181 | rows = await cur.fetchall() 182 | assert count[0] == len(rows) 183 | 184 | for param, row in zip(params, rows): 185 | assert int(param[0]) == row[0] 186 | assert param[1] == row[1] 187 | await cur.execute("DROP TABLE t1;") 188 | 189 | 190 | @pytest.mark.asyncio 191 | async def test_primaryKeys_empty(conn, table): 192 | cur = await conn.cursor() 193 | await cur.primaryKeys("t1", "t1", "t1") 194 | resp = await cur.fetchall() 195 | assert resp == [] 196 | 197 | 198 | @pytest.mark.asyncio 199 | async def test_foreignKeys_empty(conn, table): 200 | cur = await conn.cursor() 201 | await cur.foreignKeys("t1") 202 | resp = await cur.fetchall() 203 | assert resp == [] 204 | 205 | 206 | @pytest.mark.asyncio 207 | async def test_getTypeInfo_empty(conn, table): 208 | cur = await conn.cursor() 209 | await cur.getTypeInfo(pyodbc.SQL_CHAR) 210 | resp = await cur.fetchall() 211 | expected = [ 212 | ( 213 | "char", 214 | 1, 215 | 8000, 216 | "'", 217 | "'", 218 | "length", 219 | 1, 220 | 0, 221 | 3, 222 | None, 223 | 0, 224 | None, 225 | "char", 226 | None, 227 | None, 228 | 1, 229 | None, 230 | None, 231 | None, 232 | 1, 233 | ) 234 | ] 235 | type_info = [tuple(r) for r in resp] 236 | assert type_info == expected 237 | -------------------------------------------------------------------------------- /tests/test_pool.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | from pyodbc import Error 5 | 6 | import asyncodbc 7 | from asyncodbc import Connection, Pool 8 | 9 | 10 | @pytest.mark.asyncio 11 | async def test_create_pool(pool_maker, dsn): 12 | pool = await pool_maker(dsn=dsn) 13 | assert isinstance(pool, Pool) 14 | assert 1 == pool.minsize 15 | assert 10 == pool.maxsize 16 | assert 1 == pool.size 17 | assert 1 == pool.freesize 18 | assert not pool.echo 19 | 20 | 21 | @pytest.mark.asyncio 22 | async def test_create_pool2(pool_maker, dsn): 23 | pool = await pool_maker(dsn=dsn, maxsize=20) 24 | assert isinstance(pool, Pool) 25 | assert 1 == pool.minsize 26 | assert 20 == pool.maxsize 27 | assert 1 == pool.size 28 | assert 1 == pool.freesize 29 | 30 | 31 | @pytest.mark.asyncio 32 | async def test_acquire(pool): 33 | conn = await pool.acquire() 34 | try: 35 | assert isinstance(conn, Connection) 36 | assert not conn.closed 37 | cur = await conn.cursor() 38 | await cur.execute("SELECT 1") 39 | val = await cur.fetchone() 40 | assert (1,) == tuple(val) 41 | finally: 42 | await pool.release(conn) 43 | 44 | 45 | @pytest.mark.asyncio 46 | async def test_release(pool): 47 | conn = await pool.acquire() 48 | try: 49 | assert 0 == pool.freesize 50 | assert {conn} == pool._used 51 | finally: 52 | await pool.release(conn) 53 | assert 1 == pool.freesize 54 | assert not pool._used 55 | 
56 | 57 | @pytest.mark.asyncio 58 | async def test_release_closed(pool): 59 | conn = await pool.acquire() 60 | assert 0 == pool.freesize 61 | await conn.close() 62 | await pool.release(conn) 63 | assert 1 == pool.freesize 64 | assert not pool._used 65 | assert 1 == pool.size 66 | 67 | conn2 = await pool.acquire() 68 | assert 0 == pool.freesize 69 | assert 1 == pool.size 70 | await pool.release(conn2) 71 | 72 | 73 | @pytest.mark.asyncio 74 | async def test_context_manager(pool): 75 | conn = await pool.acquire() 76 | try: 77 | assert isinstance(conn, Connection) 78 | assert 0 == pool.freesize 79 | assert {conn} == pool._used 80 | finally: 81 | await pool.release(conn) 82 | assert 1 == pool.freesize 83 | 84 | 85 | @pytest.mark.asyncio 86 | async def test_clear(pool): 87 | await pool.clear() 88 | assert 0 == pool.freesize 89 | 90 | 91 | @pytest.mark.asyncio 92 | async def test_initial_empty(pool_maker, dsn): 93 | pool = await pool_maker(dsn=dsn, minsize=0) 94 | 95 | assert 10 == pool.maxsize 96 | assert 0 == pool.minsize 97 | assert 0 == pool.size 98 | assert 0 == pool.freesize 99 | 100 | conn = await pool.acquire() 101 | try: 102 | assert 1 == pool.size 103 | assert 0 == pool.freesize 104 | finally: 105 | await pool.release(conn) 106 | assert 1 == pool.size 107 | assert 1 == pool.freesize 108 | 109 | conn1 = await pool.acquire() 110 | assert 1 == pool.size 111 | assert 0 == pool.freesize 112 | 113 | conn2 = await pool.acquire() 114 | assert 2 == pool.size 115 | assert 0 == pool.freesize 116 | 117 | await pool.release(conn1) 118 | assert 2 == pool.size 119 | assert 1 == pool.freesize 120 | 121 | await pool.release(conn2) 122 | assert 2 == pool.size 123 | assert 2 == pool.freesize 124 | 125 | 126 | @pytest.mark.asyncio 127 | async def test_parallel_tasks(pool_maker, dsn): 128 | pool = await pool_maker(dsn=dsn, minsize=0, maxsize=2) 129 | 130 | assert 2 == pool.maxsize 131 | assert 0 == pool.minsize 132 | assert 0 == pool.size 133 | assert 0 == pool.freesize 134 | 135 | fut1 = pool.acquire() 136 | fut2 = pool.acquire() 137 | 138 | conn1, conn2 = await asyncio.gather( 139 | fut1, 140 | fut2, 141 | ) 142 | assert 2 == pool.size 143 | assert 0 == pool.freesize 144 | assert {conn1, conn2} == pool._used 145 | 146 | await pool.release(conn1) 147 | assert 2 == pool.size 148 | assert 1 == pool.freesize 149 | assert {conn2} == pool._used 150 | 151 | await pool.release(conn2) 152 | assert 2 == pool.size 153 | assert 2 == pool.freesize 154 | assert not conn1.closed 155 | assert not conn2.closed 156 | 157 | conn3 = await pool.acquire() 158 | assert conn3 is conn1 159 | await pool.release(conn3) 160 | 161 | 162 | @pytest.mark.asyncio 163 | async def test_parallel_tasks_more(pool_maker, dsn): 164 | pool = await pool_maker(dsn=dsn, minsize=0, maxsize=3) 165 | 166 | fut1 = pool.acquire() 167 | fut2 = pool.acquire() 168 | fut3 = pool.acquire() 169 | 170 | conn1, conn2, conn3 = await asyncio.gather( 171 | fut1, 172 | fut2, 173 | fut3, 174 | ) 175 | assert 3 == pool.size 176 | assert 0 == pool.freesize 177 | assert {conn1, conn2, conn3} == pool._used 178 | 179 | await pool.release(conn1) 180 | assert 3 == pool.size 181 | assert 1 == pool.freesize 182 | assert {conn2, conn3} == pool._used 183 | 184 | await pool.release(conn2) 185 | assert 3 == pool.size 186 | assert 2 == pool.freesize 187 | assert {conn3} == pool._used 188 | assert not conn1.closed 189 | assert not conn2.closed 190 | 191 | await pool.release(conn3) 192 | assert 3 == pool.size 193 | assert 3 == pool.freesize 194 | assert not pool._used 195 | assert 
not conn1.closed 196 | assert not conn2.closed 197 | assert not conn3.closed 198 | 199 | conn4 = await pool.acquire() 200 | assert conn4 is conn1 201 | await pool.release(conn4) 202 | 203 | 204 | @pytest.mark.asyncio 205 | async def test__fill_free(pool_maker, dsn): 206 | pool = await pool_maker(dsn=dsn, minsize=1) 207 | 208 | first_conn = await pool.acquire() 209 | try: 210 | assert 0 == pool.freesize 211 | assert 1 == pool.size 212 | 213 | conn = await asyncio.wait_for( 214 | pool.acquire(), 215 | timeout=0.5, 216 | ) 217 | assert 0 == pool.freesize 218 | assert 2 == pool.size 219 | await pool.release(conn) 220 | assert 1 == pool.freesize 221 | assert 2 == pool.size 222 | finally: 223 | await pool.release(first_conn) 224 | assert 2 == pool.freesize 225 | assert 2 == pool.size 226 | 227 | 228 | @pytest.mark.asyncio 229 | async def test_connect_from_acquire(pool_maker, dsn): 230 | pool = await pool_maker(dsn=dsn, minsize=0) 231 | 232 | assert 0 == pool.freesize 233 | assert 0 == pool.size 234 | conn = await pool.acquire() 235 | try: 236 | assert 1 == pool.size 237 | assert 0 == pool.freesize 238 | finally: 239 | await pool.release(conn) 240 | assert 1 == pool.size 241 | assert 1 == pool.freesize 242 | 243 | 244 | @pytest.mark.asyncio 245 | async def test_pool_with_connection_recycling(pool_maker, dsn): 246 | pool = await pool_maker(dsn=dsn, minsize=1, maxsize=1, pool_recycle=3) 247 | async with pool.acquire() as conn: 248 | conn1 = conn 249 | 250 | await asyncio.sleep( 251 | 5, 252 | ) 253 | 254 | assert 1 == pool.freesize 255 | async with pool.acquire() as conn: 256 | conn2 = conn 257 | 258 | assert conn1 is not conn2 259 | 260 | 261 | @pytest.mark.asyncio 262 | async def test_concurrency(pool_maker, dsn): 263 | pool = await pool_maker(dsn=dsn, minsize=2, maxsize=4) 264 | 265 | c1 = await pool.acquire() 266 | c2 = await pool.acquire() 267 | assert 0 == pool.freesize 268 | assert 2 == pool.size 269 | await pool.release(c1) 270 | await pool.release(c2) 271 | 272 | 273 | @pytest.mark.asyncio 274 | async def test_invalid_minsize_and_maxsize(dsn): 275 | with pytest.raises(ValueError): 276 | await asyncodbc.create_pool(dsn=dsn, minsize=-1) 277 | 278 | with pytest.raises(ValueError): 279 | await asyncodbc.create_pool(dsn=dsn, minsize=5, maxsize=2) 280 | 281 | 282 | @pytest.mark.asyncio 283 | async def test_true_parallel_tasks(pool_maker, dsn): 284 | pool = await pool_maker(dsn=dsn, minsize=0, maxsize=1) 285 | 286 | assert 1 == pool.maxsize 287 | assert 0 == pool.minsize 288 | assert 0 == pool.size 289 | assert 0 == pool.freesize 290 | 291 | maxsize = 0 292 | minfreesize = 100 293 | 294 | async def inner(): 295 | nonlocal maxsize, minfreesize 296 | maxsize = max(maxsize, pool.size) 297 | minfreesize = min(minfreesize, pool.freesize) 298 | conn = await pool.acquire() 299 | maxsize = max(maxsize, pool.size) 300 | minfreesize = min(minfreesize, pool.freesize) 301 | await asyncio.sleep( 302 | 0.01, 303 | ) 304 | await pool.release(conn) 305 | maxsize = max(maxsize, pool.size) 306 | minfreesize = min(minfreesize, pool.freesize) 307 | 308 | await asyncio.gather( 309 | inner(), 310 | inner(), 311 | ) 312 | 313 | assert 1 == maxsize 314 | assert 0 == minfreesize 315 | 316 | 317 | @pytest.mark.asyncio 318 | async def test_cannot_acquire_after_closing(pool_maker, dsn): 319 | pool = await pool_maker(dsn=dsn) 320 | 321 | pool.close() 322 | 323 | with pytest.raises(RuntimeError): 324 | await pool.acquire() 325 | 326 | 327 | @pytest.mark.asyncio 328 | async def test_wait_closed(pool_maker, dsn): 329 | pool = 
await pool_maker(dsn=dsn) 330 | 331 | c1 = await pool.acquire() 332 | c2 = await pool.acquire() 333 | assert 2 == pool.size 334 | assert 0 == pool.freesize 335 | 336 | ops = [] 337 | 338 | async def do_release(conn): 339 | await asyncio.sleep( 340 | 0, 341 | ) 342 | await pool.release(conn) 343 | ops.append("release") 344 | 345 | async def wait_closed(): 346 | await pool.wait_closed() 347 | ops.append("wait_closed") 348 | 349 | pool.close() 350 | await asyncio.gather( 351 | wait_closed(), 352 | do_release(c1), 353 | do_release(c2), 354 | ) 355 | assert sorted(["release", "release", "wait_closed"]) == sorted(ops) 356 | assert 0 == pool.freesize 357 | 358 | 359 | @pytest.mark.asyncio 360 | async def test_echo(pool_maker, dsn): 361 | pool = await pool_maker(dsn=dsn, echo=True) 362 | 363 | assert pool.echo 364 | conn = await pool.acquire() 365 | assert conn.echo 366 | await pool.release(conn) 367 | 368 | 369 | @pytest.mark.asyncio 370 | async def test_release_closed_connection(pool_maker, dsn): 371 | pool = await pool_maker(dsn=dsn) 372 | 373 | conn = await pool.acquire() 374 | await conn.close() 375 | 376 | await pool.release(conn) 377 | pool.close() 378 | 379 | 380 | @pytest.mark.asyncio 381 | async def test_wait_closing_on_not_closed(pool_maker, dsn): 382 | pool = await pool_maker(dsn=dsn) 383 | 384 | with pytest.raises(RuntimeError): 385 | await pool.wait_closed() 386 | pool.close() 387 | 388 | 389 | @pytest.mark.asyncio 390 | async def test_close_with_acquired_connections(pool_maker, dsn): 391 | pool = await pool_maker(dsn=dsn) 392 | 393 | conn = await pool.acquire() 394 | pool.close() 395 | 396 | with pytest.raises(asyncio.TimeoutError): 397 | await asyncio.wait_for( 398 | pool.wait_closed(), 399 | 0.1, 400 | ) 401 | await conn.close() 402 | await pool.release(conn) 403 | 404 | 405 | @pytest.mark.asyncio 406 | async def test_pool_with_executor(pool_maker, dsn, executor): 407 | pool = await pool_maker(executor=executor, dsn=dsn, minsize=2, maxsize=2) 408 | 409 | conn = await pool.acquire() 410 | try: 411 | assert isinstance(conn, Connection) 412 | assert not conn.closed 413 | assert conn._executor is executor 414 | cur = await conn.cursor() 415 | await cur.execute("SELECT 1") 416 | val = await cur.fetchone() 417 | assert (1,) == tuple(val) 418 | finally: 419 | await pool.release(conn) 420 | # we close pool here instead in finalizer because of pool should be 421 | # closed before executor 422 | pool.close() 423 | await pool.wait_closed() 424 | 425 | 426 | @pytest.mark.asyncio 427 | async def test_pool_context_manager(pool): 428 | assert not pool.closed 429 | async with pool: 430 | assert not pool.closed 431 | assert pool.closed 432 | 433 | 434 | @pytest.mark.asyncio 435 | async def test_pool_context_manager2(pool): 436 | async with pool.acquire() as conn: 437 | assert not conn.closed 438 | cur = await conn.cursor() 439 | await cur.execute("SELECT 1") 440 | val = await cur.fetchone() 441 | assert (1,) == tuple(val) 442 | 443 | 444 | @pytest.mark.asyncio 445 | async def test_all_context_managers(dsn, executor): 446 | kw = {"dsn": dsn, "executor": executor} 447 | async with asyncodbc.create_pool(**kw) as pool: 448 | async with pool.acquire() as conn: 449 | async with conn.cursor() as cur: 450 | assert not pool.closed 451 | assert not conn.closed 452 | assert not cur.closed 453 | 454 | await cur.execute("SELECT 1") 455 | val = await cur.fetchone() 456 | assert (1,) == tuple(val) 457 | 458 | assert pool.closed 459 | assert conn.closed 460 | assert cur.closed 461 | 462 | 463 | 
@pytest.mark.asyncio 464 | async def test_context_manager_aexit(connection_maker): 465 | async def aexit_conntex_managet(conn): 466 | # commit on exit if no error 467 | params = (1, "123.45") 468 | async with conn.cursor() as cur: 469 | await cur.execute("CREATE TABLE cmt(n int, v VARCHAR(10))") 470 | await cur.execute("INSERT INTO cmt VALUES (?,?)", params) 471 | async with conn.cursor() as cur: 472 | await cur.execute("SELECT v FROM cmt WHERE n=1;") 473 | (value,) = await cur.fetchone() 474 | assert value == params[1] 475 | 476 | # rollback on exit if error 477 | with pytest.raises(Error): 478 | async with conn.cursor() as cur: 479 | await cur.execute("ins INTO cmt VALUES (2, '666');") 480 | async with conn.cursor() as cur: 481 | await cur.execute("SELECT v FROM cmt WHERE n=2") 482 | row = await cur.fetchone() 483 | assert row is None 484 | 485 | async with conn.cursor() as cur: 486 | await cur.execute("DROP TABLE cmt") 487 | 488 | conn = await connection_maker(autocommit=False) 489 | assert not conn.autocommit 490 | await aexit_conntex_managet(conn) 491 | await conn.commit() 492 | 493 | conn = await connection_maker(autocommit=True) 494 | assert conn.autocommit 495 | await aexit_conntex_managet(conn) 496 | -------------------------------------------------------------------------------- /tests/test_slow.py: -------------------------------------------------------------------------------- 1 | import gc 2 | from unittest import mock 3 | 4 | import pytest 5 | 6 | import asyncodbc 7 | 8 | 9 | @pytest.mark.asyncio 10 | async def test___del__(dsn, recwarn, executor): 11 | conn = await asyncodbc.connect(dsn=dsn, executor=executor) 12 | exc_handler = mock.Mock() 13 | loop = conn.loop 14 | loop.set_exception_handler(exc_handler) 15 | 16 | del conn 17 | gc.collect() 18 | w = recwarn.pop() 19 | assert issubclass(w.category, ResourceWarning) 20 | 21 | msg = {"connection": mock.ANY, "message": "Unclosed connection"} # conn was deleted 22 | if loop.get_debug(): 23 | msg["source_traceback"] = mock.ANY 24 | exc_handler.assert_called_with(loop, msg) 25 | assert not loop.is_closed() 26 | --------------------------------------------------------------------------------
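A minimal usage sketch of the API exercised by the test suite above (asyncodbc.create_pool, Pool.acquire, Connection.cursor, Cursor.execute/fetchone). The DSN string below is a hypothetical placeholder (the tests build theirs from conftest.py fixtures), and the snippet is illustrative only, not a file from the repository::

    import asyncio

    import asyncodbc

    # Hypothetical DSN placeholder; adjust driver, server and credentials
    # to match your own ODBC setup.
    DSN = (
        "Driver={ODBC Driver 18 for SQL Server};"
        "Server=localhost,1433;UID=sa;PWD=<password>;"
        "TrustServerCertificate=yes"
    )

    async def main():
        # create_pool(), pool.acquire() and conn.cursor() are all async
        # context managers, mirroring tests/test_pool.py: the pool is closed,
        # the connection released and the cursor closed on exit.
        async with asyncodbc.create_pool(dsn=DSN, minsize=1, maxsize=10) as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute("SELECT 1")
                    (value,) = await cur.fetchone()
                    assert value == 1

    asyncio.run(main())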