├── .dockerignore ├── .editorconfig ├── .flake8 ├── .github ├── scripts │ ├── docs.sh │ └── package-build.sh └── workflows │ ├── aiosql-package.yml │ └── deploy-pages.yml ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── aiosql ├── __init__.py ├── adapters │ ├── __init__.py │ ├── aiosqlite.py │ ├── asyncpg.py │ ├── duckdb.py │ ├── generic.py │ ├── mysql.py │ ├── pg8000.py │ ├── pyformat.py │ └── sqlite3.py ├── aiosql.py ├── py.typed ├── queries.py ├── query_loader.py ├── types.py └── utils.py ├── docker ├── .gitignore ├── Makefile ├── README.md ├── docker-compose.yml ├── dockerfile.python-dbs ├── dockerfile.python-mysql └── dockerfile.python-postgres ├── docs └── source │ ├── advanced-topics.rst │ ├── conf.py │ ├── contributing.rst │ ├── database-driver-adapters.rst │ ├── defining-sql-queries.rst │ ├── getting-started.rst │ ├── index.rst │ ├── todo.rst │ └── versions.rst ├── example ├── example.py ├── greetings.py ├── greetings.sh ├── greetings.sql ├── greetings_async.py ├── greetings_create.sql ├── greetings_cursor.py ├── observe_query.py ├── pg_execute_values.py └── sql │ ├── blogs │ └── blogs.sql │ ├── create_schema.sql │ └── users │ └── users.sql ├── pyproject.toml └── tests ├── Makefile ├── blogdb ├── data │ ├── blogs_data.csv │ └── users_data.csv └── sql │ ├── blogs │ ├── blogs.oops │ ├── blogs.sql │ ├── du │ │ └── blogs.sql │ ├── li │ │ └── blogs.sql │ ├── ms │ │ └── blogs.sql │ ├── my │ │ └── blogs.sql │ └── pg │ │ ├── asyncpg │ │ └── blogs.sql │ │ ├── blogs.sql │ │ └── pg8000 │ │ └── blogs.sql │ ├── comments │ ├── du │ │ └── comments.sql │ ├── li │ │ └── comments.sql │ ├── ms │ │ └── comments.sql │ ├── my │ │ └── comments.sql │ └── pg │ │ └── comments.sql │ ├── empty.sql │ ├── misc │ ├── du │ │ └── misc.sql │ ├── li │ │ └── misc.sql │ ├── misc.sql │ ├── ms │ │ └── misc.sql │ ├── my │ │ ├── misc.sql │ │ └── pymysql │ │ │ └── misc.sql │ └── pg │ │ ├── misc.sql │ │ └── pg8000 │ │ └── misc.sql │ └── users │ ├── du │ └── users.sql │ ├── 
li │ └── users.sql │ ├── ms │ └── users.sql │ ├── pg │ ├── asyncpg │ │ └── users.sql │ └── pg8000 │ │ └── users.sql │ └── users.sql ├── conf_duckdb.py ├── conf_mssql.py ├── conf_mysql.py ├── conf_pgsql.py ├── conf_schema.py ├── conf_sqlite.py ├── conftest.py ├── pytest.ini ├── run_tests.py ├── test_aiosqlite.py ├── test_apsw.py ├── test_asyncpg.py ├── test_duckdb.py ├── test_loading.py ├── test_mariadb.py ├── test_myco.py ├── test_mysqldb.py ├── test_patterns.py ├── test_pg8000.py ├── test_psycopg2.py ├── test_psycopg3.py ├── test_pygresql.py ├── test_pymssql.py ├── test_pymysql.py ├── test_sqlite3.py ├── utils.py └── wait.py /.dockerignore: -------------------------------------------------------------------------------- 1 | .gitignore -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # Use as the top-most EditorConfig file 2 | root = true 3 | 4 | # Unix-style newlines with a newline ending every file 5 | [*] 6 | end_of_line = lf 7 | insert_final_newline = true 8 | trim_trailing_whitespace = true 9 | 10 | [*.py] 11 | charset = utf-8 12 | indent_style = space 13 | indent_size = 4 14 | 15 | [*.{md,markdown}] 16 | indent_style = space 17 | indent_size = 4 18 | trim_trailing_whitespace = false 19 | 20 | # 2 space indentation 21 | [*.{json,yml,sh}] 22 | indent_style = space 23 | indent_size = 2 24 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # Derived from the one from Black itself. Ignore rules that are handled by Black. 
3 | ignore = E203, E266, E501, W503, E704 4 | select = B,C,E,F,W,T4,B9 5 | max-line-length = 100 6 | -------------------------------------------------------------------------------- /.github/scripts/docs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euo pipefail 4 | 5 | # add sphinx specific entries when generating the doc so that pypi will not complain 6 | 7 | cat >> docs/source/index.rst < 18 | Defining SQL Queries 19 | Advanced Topics 20 | Database Driver Adapters 21 | Contributing 22 | API 23 | Versions 24 | Backlog 25 | EOF 26 | 27 | sphinx-apidoc -f -o docs/source/pydoc aiosql 28 | sphinx-build -b html docs/source docs/build 29 | -------------------------------------------------------------------------------- /.github/scripts/package-build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | python -m build --sdist 5 | python -m build --wheel 6 | -------------------------------------------------------------------------------- /.github/workflows/aiosql-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Aiosql Package 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | # https://github.com/actions/python-versions (versions-manifest.json) 19 | # https://downloads.python.org/pypy/versions.json 20 | # 3.14 KO on pydantic build dependences 2024-10-27 21 | # 3.14 way too slow duckdb wheel build 2025-01-28 22 | python: 23 | - version: "pypy3.10" 24 | - version: "pypy3.11" 25 | - 
version: "3.9" 26 | - version: "3.10" 27 | - version: "3.11" 28 | - version: "3.12" 29 | - version: "3.13" 30 | - version: "3.13t" 31 | gil: 1 32 | - version: "3.13t" 33 | gil: 0 34 | - version: "3.14" 35 | # - version: "3.14t" 36 | # gil: 1 37 | # - version: "3.14t" 38 | # gil: 0 39 | env: 40 | PYTHON_GIL: ${{ matrix.python.gil }} 41 | steps: 42 | - name: Checkout Project 43 | uses: actions/checkout@v4 44 | - name: Set up Python ${{ matrix.python.version }} 45 | uses: actions/setup-python@v5 46 | with: 47 | python-version: ${{ matrix.python.version }} 48 | allow-prereleases: true 49 | cache: "pip" 50 | - name: Install dependencies 51 | run: | 52 | python -m pip install --upgrade pip 53 | python -m pip install .[dev,dev-postgres,dev-mysql,dev-sqlite,dev-duckdb] 54 | - name: Check types with mypy 55 | run: make VENV= INSTALL= check.mypy 56 | - name: Check types with pyright 57 | run: make VENV= INSTALL= check.pyright 58 | - name: Lint with ruff 59 | run: make VENV= INSTALL= check.ruff 60 | - name: Test with pytest and databases 61 | run: make VENV= INSTALL= check.pytest 62 | - name: Coverage tests 63 | run: make VENV= INSTALL= check.coverage 64 | -------------------------------------------------------------------------------- /.github/workflows/deploy-pages.yml: -------------------------------------------------------------------------------- 1 | name: aiosql docs 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | publish: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v4 13 | - name: Set up Python 14 | uses: actions/setup-python@v5 15 | - name: Install dependencies 16 | run: | 17 | python -m pip install -U pip 18 | python -m pip install .[doc] 19 | - name: Check RST files 20 | run: make check.rstcheck 21 | - name: Generate documentation 22 | run: .github/scripts/docs.sh 23 | - name: Deploy to GitHub Pages 24 | if: success() 25 | uses: crazy-max/ghaction-github-pages@v4 26 | with: 27 | target_branch: gh-pages 
28 | build_dir: docs/build 29 | env: 30 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Obvious 2 | .git 3 | 4 | # Swap 5 | [._]*.s[a-v][a-z] 6 | [._]*.sw[a-p] 7 | [._]s[a-rt-v][a-z] 8 | [._]ss[a-gi-z] 9 | [._]sw[a-p] 10 | 11 | # Session 12 | Session.vim 13 | 14 | # Temporary 15 | .netrwhist 16 | *~ 17 | # Auto-generated tag files 18 | tags 19 | # Persistent undo 20 | [._]*.un~ 21 | 22 | # Byte-compiled / optimized / DLL files 23 | __pycache__/ 24 | *.py[cod] 25 | *$py.class 26 | 27 | # C extensions 28 | *.so 29 | 30 | # Distribution / packaging 31 | .Python 32 | build/ 33 | develop-eggs/ 34 | dist/ 35 | downloads/ 36 | eggs/ 37 | .eggs/ 38 | lib/ 39 | lib64/ 40 | parts/ 41 | sdist/ 42 | var/ 43 | wheels/ 44 | *.egg-info/ 45 | .installed.cfg 46 | *.egg 47 | MANIFEST 48 | docs/html 49 | 50 | # PyInstaller 51 | # Usually these files are written by a python script from a template 52 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
53 | *.manifest 54 | *.spec 55 | 56 | # Installer logs 57 | pip-log.txt 58 | pip-delete-this-directory.txt 59 | 60 | # Unit test / coverage reports 61 | htmlcov/ 62 | .tox/ 63 | .coverage 64 | .coverage.* 65 | .cache 66 | nosetests.xml 67 | coverage.xml 68 | *.cover 69 | .hypothesis/ 70 | .pytest_cache/ 71 | 72 | # Translations 73 | *.mo 74 | *.pot 75 | 76 | # Django stuff: 77 | *.log 78 | local_settings.py 79 | db.sqlite3 80 | 81 | # Flask stuff: 82 | instance/ 83 | .webassets-cache 84 | 85 | # Scrapy stuff: 86 | .scrapy 87 | 88 | # Sphinx documentation 89 | docs/build/ 90 | docs/source/pydoc/ 91 | 92 | # PyBuilder 93 | target/ 94 | 95 | # Jupyter Notebook 96 | .ipynb_checkpoints 97 | 98 | # pyenv 99 | .python-version 100 | 101 | # celery beat schedule file 102 | celerybeat-schedule 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | poetry.lock 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mypy 125 | .mypy_cache/ 126 | 127 | # PyCharm 128 | .idea/ 129 | 130 | # Project files 131 | scratch/ 132 | exampleblog.db 133 | 134 | # misc 135 | .docker.* 136 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014-2017, Honza Pokorny 2 | Copyright (c) 2018, William Vaughn 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 2. 
Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 15 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 16 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 18 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 19 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 20 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 21 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 23 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 24 | 25 | The views and conclusions contained in the software and documentation are those 26 | of the authors and should not be interpreted as representing official policies, 27 | either expressed or implied, of the aiosql Project. 
28 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include docs * 2 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | docs/source/index.rst -------------------------------------------------------------------------------- /aiosql/__init__.py: -------------------------------------------------------------------------------- 1 | from .aiosql import from_path, from_str, register_adapter 2 | from .utils import SQLParseException, SQLLoadException 3 | from importlib.metadata import version 4 | 5 | __version__ = version("aiosql") 6 | 7 | __all__ = ["from_path", "from_str", "register_adapter", "SQLParseException", "SQLLoadException"] 8 | -------------------------------------------------------------------------------- /aiosql/adapters/__init__.py: -------------------------------------------------------------------------------- 1 | # standard adapters 2 | from .pyformat import PyFormatAdapter 3 | from .generic import GenericAdapter 4 | from .sqlite3 import SQLite3Adapter 5 | 6 | # async adapters 7 | from .aiosqlite import AioSQLiteAdapter 8 | from .asyncpg import AsyncPGAdapter 9 | 10 | # silence flake8 F401 warning: 11 | _ALL = [ 12 | PyFormatAdapter, 13 | GenericAdapter, 14 | SQLite3Adapter, 15 | AioSQLiteAdapter, 16 | AsyncPGAdapter, 17 | ] 18 | -------------------------------------------------------------------------------- /aiosql/adapters/aiosqlite.py: -------------------------------------------------------------------------------- 1 | from contextlib import asynccontextmanager 2 | 3 | 4 | class AioSQLiteAdapter: 5 | is_aio_driver = True 6 | 7 | def process_sql(self, _query_name, _op_type, sql): 8 | """Pass through function because the ``aiosqlite`` driver can already handle the 9 | ``:var_name`` 
format used by aiosql and doesn't need any additional processing. 10 | 11 | Args: 12 | 13 | - _query_name (str): The name of the sql query. 14 | - _op_type (SQLOperationType): The type of SQL operation performed by the query. 15 | - sql (str): The sql as written before processing. 16 | 17 | Returns: 18 | 19 | - str: Original SQL text unchanged. 20 | """ 21 | return sql 22 | 23 | async def select(self, conn, _query_name, sql, parameters, record_class=None): 24 | async with conn.execute(sql, parameters) as cur: 25 | results = await cur.fetchall() 26 | if record_class is not None: 27 | column_names = [c[0] for c in cur.description] 28 | results = [record_class(**dict(zip(column_names, row))) for row in results] 29 | return results 30 | 31 | async def select_one(self, conn, _query_name, sql, parameters, record_class=None): 32 | async with conn.execute(sql, parameters) as cur: 33 | result = await cur.fetchone() 34 | if result is not None and record_class is not None: 35 | column_names = [c[0] for c in cur.description] 36 | result = record_class(**dict(zip(column_names, result))) 37 | return result 38 | 39 | async def select_value(self, conn, _query_name, sql, parameters): 40 | async with conn.execute(sql, parameters) as cur: 41 | result = await cur.fetchone() 42 | return result[0] if result else None 43 | 44 | @asynccontextmanager 45 | async def select_cursor(self, conn, _query_name, sql, parameters): 46 | async with conn.execute(sql, parameters) as cur: 47 | yield cur 48 | 49 | async def insert_returning(self, conn, _query_name, sql, parameters): 50 | async with conn.execute(sql, parameters) as cur: 51 | return cur.lastrowid 52 | 53 | async def insert_update_delete(self, conn, _query_name, sql, parameters): 54 | async with conn.execute(sql, parameters) as cur: 55 | return cur.rowcount 56 | 57 | async def insert_update_delete_many(self, conn, _query_name, sql, parameters): 58 | cur = await conn.executemany(sql, parameters) 59 | await cur.close() 60 | 61 | async def 
execute_script(self, conn, sql): 62 | await conn.executescript(sql) 63 | return "DONE" 64 | -------------------------------------------------------------------------------- /aiosql/adapters/asyncpg.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from contextlib import asynccontextmanager 3 | 4 | from ..utils import VAR_REF 5 | 6 | 7 | class MaybeAcquire: 8 | def __init__(self, client, driver=None): 9 | self.client = client 10 | self._driver = driver 11 | 12 | async def __aenter__(self): 13 | if "acquire" in dir(self.client): 14 | self._managed_conn = await self.client.acquire() 15 | return self._managed_conn 16 | else: 17 | self._managed_conn = None 18 | return self.client 19 | 20 | async def __aexit__(self, exc_type, exc, tb): 21 | if self._managed_conn is not None: 22 | await self.client.release(self._managed_conn) 23 | 24 | 25 | class AsyncPGAdapter: 26 | is_aio_driver = True 27 | 28 | def __init__(self): 29 | self.var_sorted = defaultdict(list) 30 | 31 | def process_sql(self, query_name, _op_type, sql): 32 | """asyncpg seems to only support numeric.""" 33 | adj = 0 34 | 35 | for match in VAR_REF.finditer(sql): 36 | gd = match.groupdict() 37 | # Do nothing if the match is found within quotes. 38 | if gd["dquote"] is not None or gd["squote"] is not None: 39 | continue 40 | 41 | var_name = gd["var_name"] 42 | if var_name in self.var_sorted[query_name]: 43 | replacement = f"${self.var_sorted[query_name].index(var_name) + 1}" 44 | else: 45 | replacement = f"${len(self.var_sorted[query_name]) + 1}" 46 | self.var_sorted[query_name].append(var_name) 47 | 48 | # Determine the offset of the start and end of the original 49 | # variable that we are replacing, taking into account an adjustment 50 | # factor based on previous replacements (see the note below). 
51 | start = match.start() + len(gd["lead"]) + adj 52 | end = match.end() + adj 53 | 54 | sql = sql[:start] + replacement + sql[end:] 55 | 56 | # If the replacement and original variable were different lengths, 57 | # then the offsets of subsequent matches will be wrong by the 58 | # difference. Calculate an adjustment to apply to reconcile those 59 | # offsets with the modified string. 60 | # 61 | # The "- 1" is to account for the leading ":" character in the 62 | # original string. 63 | adj += len(replacement) - len(var_name) - 1 64 | 65 | return sql 66 | 67 | def maybe_order_params(self, query_name, parameters): 68 | if isinstance(parameters, dict): 69 | return [parameters[rk] for rk in self.var_sorted[query_name]] 70 | elif isinstance(parameters, tuple): 71 | return parameters 72 | else: 73 | raise ValueError(f"Parameters expected to be dict or tuple, received {parameters}") 74 | 75 | async def select(self, conn, query_name, sql, parameters, record_class=None): 76 | parameters = self.maybe_order_params(query_name, parameters) 77 | async with MaybeAcquire(conn) as connection: 78 | results = await connection.fetch(sql, *parameters) 79 | if record_class is not None: 80 | results = [record_class(**dict(rec)) for rec in results] 81 | return results 82 | 83 | async def select_one(self, conn, query_name, sql, parameters, record_class=None): 84 | parameters = self.maybe_order_params(query_name, parameters) 85 | async with MaybeAcquire(conn) as connection: 86 | result = await connection.fetchrow(sql, *parameters) 87 | if result is not None and record_class is not None: 88 | result = record_class(**dict(result)) 89 | return result 90 | 91 | async def select_value(self, conn, query_name, sql, parameters): 92 | parameters = self.maybe_order_params(query_name, parameters) 93 | async with MaybeAcquire(conn) as connection: 94 | return await connection.fetchval(sql, *parameters) 95 | 96 | @asynccontextmanager 97 | async def select_cursor(self, conn, query_name, sql, 
parameters): 98 | parameters = self.maybe_order_params(query_name, parameters) 99 | async with MaybeAcquire(conn) as connection: 100 | stmt = await connection.prepare(sql) 101 | async with connection.transaction(): 102 | yield stmt.cursor(*parameters) 103 | 104 | async def insert_returning(self, conn, query_name, sql, parameters): 105 | parameters = self.maybe_order_params(query_name, parameters) 106 | async with MaybeAcquire(conn) as connection: 107 | res = await connection.fetchrow(sql, *parameters) 108 | if res: 109 | return res[0] if len(res) == 1 else res 110 | else: 111 | return None 112 | 113 | async def insert_update_delete(self, conn, query_name, sql, parameters): 114 | parameters = self.maybe_order_params(query_name, parameters) 115 | async with MaybeAcquire(conn) as connection: 116 | # TODO extract integer last result 117 | return await connection.execute(sql, *parameters) 118 | 119 | async def insert_update_delete_many(self, conn, query_name, sql, parameters): 120 | parameters = [self.maybe_order_params(query_name, params) for params in parameters] 121 | async with MaybeAcquire(conn) as connection: 122 | return await connection.executemany(sql, parameters) 123 | 124 | async def execute_script(self, conn, sql): 125 | async with MaybeAcquire(conn) as connection: 126 | return await connection.execute(sql) 127 | -------------------------------------------------------------------------------- /aiosql/adapters/duckdb.py: -------------------------------------------------------------------------------- 1 | from .generic import GenericAdapter 2 | from ..utils import VAR_REF 3 | from typing import List 4 | 5 | 6 | def _colon_to_dollar(ma): 7 | """Convert 'WHERE :id = 1' to 'WHERE $id = 1'.""" 8 | gd = ma.groupdict() 9 | if gd["dquote"] is not None: 10 | return gd["dquote"] 11 | elif gd["squote"] is not None: 12 | return gd["squote"] 13 | else: 14 | return f'{gd["lead"]}${gd["var_name"]}' 15 | 16 | 17 | class DuckDBAdapter(GenericAdapter): 18 | """DuckDB 
Adapter""" 19 | 20 | def __init__(self, *args, cursor_as_dict: bool = False, use_cursor: bool = True, **kwargs): 21 | super().__init__(*args, **kwargs) 22 | # whether to converts the default tuple response to a dict. 23 | self._convert_row_to_dict = cursor_as_dict 24 | self._use_cursor = use_cursor 25 | 26 | def _cursor(self, conn): 27 | """Get a cursor from a connection.""" 28 | # For DuckDB cursor is duplicated connection so we don't want to use it 29 | if self._use_cursor: 30 | return conn.cursor(*self._args, **self._kwargs) 31 | return conn 32 | 33 | def process_sql(self, query_name, op_type, sql): 34 | return VAR_REF.sub(_colon_to_dollar, sql) 35 | 36 | def insert_returning(self, conn, query_name, sql, parameters): # pragma: no cover 37 | # very similar to select_one but the returned value 38 | cur = self._cursor(conn) 39 | try: 40 | cur.execute(sql, parameters) 41 | # we have to use fetchall instead of fetchone for now due to this: 42 | # https://github.com/duckdb/duckdb/issues/6008 43 | res = cur.fetchall() 44 | finally: 45 | if self._use_cursor: 46 | cur.close() 47 | if isinstance(res, list): 48 | res = res[0] 49 | return res[0] if res and len(res) == 1 else res 50 | 51 | def select(self, conn, query_name: str, sql: str, parameters, record_class=None): 52 | column_names: List[str] = [] 53 | cur = self._cursor(conn) 54 | try: 55 | cur.execute(sql, parameters) 56 | if record_class is None: 57 | first = True 58 | for row in cur.fetchall(): 59 | if first: # get column names on the fly 60 | column_names = [c[0] for c in cur.description or []] 61 | first = False 62 | if self._convert_row_to_dict: # pragma: no cover 63 | # strict=False: requires 3.10 64 | yield dict(zip(column_names, row)) 65 | else: 66 | yield row 67 | else: # pragma: no cover 68 | first = True 69 | for row in cur.fetchall(): 70 | if first: # only get description on the fly, for apsw 71 | column_names = [c[0] for c in cur.description or []] 72 | first = False 73 | # strict=False: requires 3.10 74 
| yield record_class(**dict(zip(column_names, row))) 75 | finally: 76 | if self._use_cursor: 77 | cur.close() 78 | 79 | def select_one(self, conn, query_name, sql, parameters, record_class=None): 80 | cur = self._cursor(conn) 81 | try: 82 | cur.execute(sql, parameters) 83 | result = cur.fetchone() 84 | if result is not None and record_class is not None: # pragma: no cover 85 | column_names = [c[0] for c in cur.description or []] 86 | # strict=False: requires 3.10 87 | result = record_class(**dict(zip(column_names, result))) 88 | elif result is not None and self._convert_row_to_dict: # pragma: no cover 89 | column_names = [c[0] for c in cur.description or []] 90 | result = dict(zip(column_names, result)) 91 | finally: 92 | if self._use_cursor: 93 | cur.close() 94 | return result 95 | -------------------------------------------------------------------------------- /aiosql/adapters/generic.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | from typing import List 3 | from ..types import SyncDriverAdapterProtocol 4 | 5 | 6 | class GenericAdapter(SyncDriverAdapterProtocol): 7 | """ 8 | Generic AioSQL Adapter suitable for `named` parameter style and no with support. 9 | 10 | This class also serves as the base class for other adapters. 11 | 12 | Miscellaneous parameters are passed to cursor creation. 
13 | """ 14 | 15 | def __init__(self, *args, **kwargs): 16 | self._args = args 17 | self._kwargs = kwargs 18 | 19 | def process_sql(self, query_name, op_type, sql): 20 | """Pass-through SQL query preprocessing.""" 21 | return sql 22 | 23 | def _cursor(self, conn): 24 | """Get a cursor from a connection.""" 25 | return conn.cursor(*self._args, **self._kwargs) 26 | 27 | def select(self, conn, query_name: str, sql: str, parameters, record_class=None): 28 | """Handle a relation-returning SELECT (no suffix).""" 29 | cur = self._cursor(conn) 30 | try: 31 | cur.execute(sql, parameters) 32 | if record_class is None: 33 | for row in cur: 34 | yield row 35 | else: 36 | column_names: List[str] = [] 37 | first = True 38 | for row in cur: 39 | if first: # only get description on the fly, for apsw 40 | column_names = [c[0] for c in cur.description] 41 | first = False 42 | yield record_class(**dict(zip(column_names, row))) 43 | finally: 44 | cur.close() 45 | 46 | def select_one(self, conn, query_name, sql, parameters, record_class=None): 47 | """Handle a tuple-returning (one row) SELECT (``^`` suffix). 48 | 49 | Return None if empty.""" 50 | cur = self._cursor(conn) 51 | try: 52 | cur.execute(sql, parameters) 53 | result = cur.fetchone() 54 | if result is not None and record_class is not None: 55 | column_names = [c[0] for c in cur.description] 56 | # this fails if result is not a list or tuple 57 | result = record_class(**dict(zip(column_names, result))) 58 | finally: 59 | cur.close() 60 | return result 61 | 62 | def select_value(self, conn, query_name, sql, parameters): 63 | """Handle a scalar-returning (one value) SELECT (``$`` suffix). 
64 | 65 | Return None if empty.""" 66 | cur = self._cursor(conn) 67 | try: 68 | cur.execute(sql, parameters) 69 | result = cur.fetchone() 70 | if result: 71 | if isinstance(result, (list, tuple)): 72 | return result[0] 73 | elif isinstance(result, dict): # pragma: no cover 74 | return next(iter(result.values())) 75 | else: # pragma: no cover 76 | raise Exception(f"unexpected value type: {type(result)}") 77 | else: 78 | return None 79 | finally: 80 | cur.close() 81 | 82 | @contextmanager 83 | def select_cursor(self, conn, query_name, sql, parameters): 84 | """Return the raw cursor after a SELECT exec.""" 85 | cur = self._cursor(conn) 86 | try: 87 | cur.execute(sql, parameters) 88 | yield cur 89 | finally: 90 | cur.close() 91 | 92 | def insert_update_delete(self, conn, query_name, sql, parameters): 93 | """Handle affected row counts (INSERT UPDATE DELETE) (``!`` suffix).""" 94 | cur = self._cursor(conn) 95 | cur.execute(sql, parameters) 96 | rc = cur.rowcount if hasattr(cur, "rowcount") else -1 97 | cur.close() 98 | return rc 99 | 100 | def insert_update_delete_many(self, conn, query_name, sql, parameters): 101 | """Handle affected row counts (INSERT UPDATE DELETE) (``*!`` suffix).""" 102 | cur = self._cursor(conn) 103 | cur.executemany(sql, parameters) 104 | rc = cur.rowcount if hasattr(cur, "rowcount") else -1 105 | cur.close() 106 | return rc 107 | 108 | # FIXME this made sense when SQLite had no RETURNING prefix (v3.35, 2021-03-12) 109 | def insert_returning(self, conn, query_name, sql, parameters): 110 | """Special case for RETURNING (`` DriverAdapterProtocol: 47 | """Get the driver adapter instance registered by the `driver_name`.""" 48 | if isinstance(driver_adapter, str): 49 | try: 50 | adapter = _ADAPTERS[driver_adapter.lower()] 51 | except KeyError: 52 | raise ValueError(f"Encountered unregistered driver_adapter: {driver_adapter}") 53 | # try some guessing if it is a PEP249 module 54 | elif hasattr(driver_adapter, "paramstyle"): 55 | style = 
getattr(driver_adapter, "paramstyle") # avoid mypy warning? 56 | if style == "pyformat": 57 | adapter = PyFormatAdapter # type: ignore 58 | elif style == "named": 59 | adapter = GenericAdapter # type: ignore 60 | else: 61 | raise ValueError(f"Unexpected driver: {driver_adapter} ({style})") 62 | # so, can we just call it? 63 | elif callable(driver_adapter): # pragma: no cover 64 | adapter = driver_adapter 65 | else: 66 | raise ValueError(f"Unexpected driver_adapter: {driver_adapter}") 67 | 68 | return adapter(*args, **kwargs) 69 | 70 | 71 | def from_str( 72 | sql: str, 73 | driver_adapter: Union[str, Callable[..., DriverAdapterProtocol]], 74 | record_classes: Optional[Dict] = None, 75 | kwargs_only: bool = True, 76 | attribute: Optional[str] = "__", 77 | args: List[Any] = [], 78 | kwargs: Dict[str, Any] = {}, 79 | loader_cls: Type[QueryLoader] = QueryLoader, 80 | queries_cls: Type[Queries] = Queries, 81 | ): 82 | """Load queries from a SQL string. 83 | 84 | **Parameters:** 85 | 86 | - **sql** - A string containing SQL statements and aiosql name. 87 | - **driver_adapter** - Either a string to designate one of the aiosql built-in database driver 88 | adapters. One of many available for SQLite, Postgres and MySQL. If you have defined your 89 | own adapter class, you can pass it's constructor. 90 | - **kwargs_only** - *(optional)* whether to only use named parameters on query execution, default is *True*. 91 | - **attribute** - *(optional)* ``.`` attribute access substitution, defaults to ``"__"``, *None* disables 92 | the feature. 93 | - **args** - *(optional)* adapter creation args (list), forwarded to cursor creation by default. 94 | - **kwargs** - *(optional)* adapter creation args (dict), forwarded to cursor creation by default. 95 | - **record_classes** - *(optional)* **DEPRECATED** Mapping of strings used in "record_class" 96 | declarations to the python classes which aiosql should use when marshaling SQL results. 
97 | - **loader_cls** - *(optional)* Custom constructor for QueryLoader extensions. 98 | - **queries_cls** - *(optional)* Custom constructor for Queries extensions. 99 | 100 | **Returns:** ``Queries`` 101 | 102 | Usage: 103 | 104 | Loading queries from a SQL string. 105 | 106 | .. code-block:: python 107 | 108 | import sqlite3 109 | import aiosql 110 | 111 | sql_text = \"\"\" 112 | -- name: get-all-greetings 113 | -- Get all the greetings in the database 114 | select * from greetings; 115 | 116 | -- name: get-user-by-username^ 117 | -- Get all the users from the database, 118 | -- and return it as a dict 119 | select * from users where username = :username; 120 | \"\"\" 121 | 122 | queries = aiosql.from_str(sql_text, "sqlite3") 123 | queries.get_all_greetings(conn) 124 | queries.get_user_by_username(conn, username="willvaughn") 125 | """ 126 | adapter = _make_driver_adapter(driver_adapter, *args, **kwargs) 127 | query_loader = loader_cls(adapter, record_classes, attribute=attribute) 128 | query_data = query_loader.load_query_data_from_sql(sql, []) 129 | return queries_cls(adapter, kwargs_only=kwargs_only).load_from_list(query_data) 130 | 131 | 132 | def from_path( 133 | sql_path: Union[str, Path], 134 | driver_adapter: Union[str, Callable[..., DriverAdapterProtocol]], 135 | record_classes: Optional[Dict] = None, 136 | kwargs_only: bool = True, 137 | attribute: Optional[str] = "__", 138 | args: List[Any] = [], 139 | kwargs: Dict[str, Any] = {}, 140 | loader_cls: Type[QueryLoader] = QueryLoader, 141 | queries_cls: Type[Queries] = Queries, 142 | ext: Tuple[str] = (".sql",), 143 | encoding=None, 144 | ): 145 | """Load queries from a `.sql` file, or directory of `.sql` files. 146 | 147 | **Parameters:** 148 | 149 | - **sql_path** - Path to a `.sql` file or directory containing `.sql` files. 150 | - **driver_adapter** - Either a string to designate one of the aiosql built-in database driver 151 | adapters. One of many available for SQLite, Postgres and MySQL. 
If you have defined your own 152 | adapter class, you may pass its constructor. 153 | - **record_classes** - *(optional)* **DEPRECATED** Mapping of strings used in "record_class" declarations to the python classes which aiosql should use when marshaling SQL results. 154 | - **kwargs_only** - *(optional)* Whether to only use named parameters on query execution, default is *True*. 155 | - **attribute** - *(optional)* ``.`` attribute access substitution, defaults to ``"__"``, *None* disables 156 | the feature. 157 | - **args** - *(optional)* adapter creation args (list), forwarded to cursor creation by default. 158 | - **kwargs** - *(optional)* adapter creation args (dict), forwarded to cursor creation by default. 159 | 160 | - **loader_cls** - *(optional)* Custom constructor for `QueryLoader` extensions. 161 | - **queries_cls** - *(optional)* Custom constructor for `Queries` extensions. 162 | - **ext** - *(optional)* allowed file extensions for query files, default is `(".sql",)`. 163 | - **encoding** - *(optional)* encoding for reading files. 164 | 165 | **Returns:** `Queries` 166 | 167 | Usage: 168 | 169 | ..
code-block:: python 170 | 171 | queries = aiosql.from_path("./sql", "psycopg2") 172 | queries = aiosql.from_path("./sql", MyDBAdapter) 173 | """ 174 | path = Path(sql_path) 175 | 176 | if not path.exists(): 177 | raise SQLLoadException(f"File does not exist: {path}") 178 | 179 | adapter = _make_driver_adapter(driver_adapter, *args, **kwargs) 180 | query_loader = loader_cls(adapter, record_classes, attribute=attribute) 181 | 182 | if path.is_file(): 183 | query_data = query_loader.load_query_data_from_file(path, encoding=encoding) 184 | return queries_cls(adapter, kwargs_only=kwargs_only).load_from_list(query_data) 185 | elif path.is_dir(): 186 | query_data_tree = query_loader.load_query_data_from_dir_path( 187 | path, ext=ext, encoding=encoding 188 | ) 189 | return queries_cls(adapter, kwargs_only=kwargs_only).load_from_tree(query_data_tree) 190 | else: # pragma: no cover 191 | raise SQLLoadException(f"The sql_path must be a directory or file, got {sql_path}") 192 | -------------------------------------------------------------------------------- /aiosql/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nackjicholson/aiosql/d25247160fecd1992cb7465d20c9948bf4cc24b0/aiosql/py.typed -------------------------------------------------------------------------------- /aiosql/query_loader.py: -------------------------------------------------------------------------------- 1 | import re 2 | import inspect 3 | from pathlib import Path 4 | from typing import Dict, List, Optional, Tuple, Type, Sequence, Any, Union 5 | 6 | from .utils import SQLParseException, SQLLoadException, VAR_REF, VAR_REF_DOT, log 7 | from .types import QueryDatum, QueryDataTree, SQLOperationType, DriverAdapterProtocol 8 | 9 | # identifies name definition comments 10 | _QUERY_DEF = re.compile(r"--\s*name\s*:\s*") 11 | 12 | # identifies record class definition comments 13 | _RECORD_DEF = re.compile(r"--\s*record_class\s*:\s*(\w+)\s*") 
14 | 15 | # extract a valid query name followed by an optional operation spec 16 | # FIXME this accepts "1st" but seems to reject "é" 17 | _NAME_OP = re.compile( 18 | # query name 19 | r"^(?P\w+)" 20 | # optional list of parameters (foo, bla) or () 21 | r"(|\((?P(\s*|\s*\w+\s*(,\s*\w+\s*)*))\))" 22 | # operation, empty for simple select 23 | r"(?P(|\^|\$|!|\'(\'\'|[^\'])*\')|" 47 | # double quote strings 48 | r'(?P"(""|[^"])+")|' 49 | # one-line comment 50 | r"(?P--.*?$)|" 51 | # multiline comments, excluding SQL hints 52 | r"|(?P/\*(?!\+[\s\S]*?\*/)[\s\S]*?\*/)", 53 | re.DOTALL | re.MULTILINE, 54 | ) 55 | 56 | 57 | def _remove_ml_comments(code: str) -> str: 58 | """Remove /* ... */ comments from code""" 59 | # identify commented regions to be removed 60 | rm = [] 61 | for m in _UNCOMMENT.finditer(code): 62 | ml = m.groupdict()["multiline"] 63 | if ml: 64 | rm.append(m.span()) 65 | # keep whatever else 66 | ncode, current = "", 0 67 | for start, end in rm: 68 | ncode += code[current:start] 69 | current = end 70 | # get tail 71 | ncode += code[current:] 72 | return ncode 73 | 74 | 75 | def _preprocess_object_attributes(attribute, sql): 76 | """Substitute o.a by oa and keep track of variables.""" 77 | 78 | attributes = {} 79 | 80 | def _replace(m): 81 | gd = m.groupdict() 82 | if gd["dquote"] is not None: 83 | return gd["dquote"] 84 | elif gd["squote"] is not None: 85 | return gd["squote"] 86 | else: 87 | var, att = gd["var_name"].split(".", 1) 88 | var_name = var + attribute + att 89 | if var not in attributes: 90 | attributes[var] = {} 91 | if att not in attributes[var]: 92 | attributes[var][att] = var_name 93 | return f"{gd['lead']}:{var_name}" 94 | 95 | sql = VAR_REF_DOT.sub(_replace, sql) 96 | 97 | return sql, attributes 98 | 99 | 100 | class QueryLoader: 101 | """Load Queries. 102 | 103 | This class holds the various utilities to read SQL files and build 104 | QueryDatum, which will be transformed as functions in Queries. 
105 | 106 | - :param driver_adapter: driver name or class. 107 | - :param record_classes: nothing of dict. 108 | - :param attribute: string to insert in place of ``.``. 109 | """ 110 | 111 | def __init__( 112 | self, 113 | driver_adapter: DriverAdapterProtocol, 114 | record_classes: Optional[Dict[str, Any]], 115 | attribute: Optional[str] = None, 116 | ): 117 | self.driver_adapter = driver_adapter 118 | self.record_classes = record_classes if record_classes is not None else {} 119 | self.attribute = attribute 120 | 121 | def _make_query_datum( 122 | self, 123 | query: str, 124 | ns_parts: List[str], 125 | floc: Tuple[Union[Path, str], int], 126 | ) -> QueryDatum: 127 | """Build a query datum. 128 | 129 | - :param query: the spec and name (``query-name!\n-- comments\nSQL;\n``) 130 | - :param ns_parts: name space parts, i.e. subdirectories of loaded files 131 | - :param floc: file name and lineno the query was extracted from 132 | """ 133 | lines = [line.strip() for line in query.strip().splitlines()] 134 | qname, qop, qsig = self._get_name_op(lines[0]) 135 | if re.search(r"[^A-Za-z0-9_]", qname): 136 | log.warning(f"non ASCII character in query name: {qname}") 137 | if len(lines) <= 1: 138 | raise SQLParseException(f"empty query for: {qname} at {floc[0]}:{floc[1]}") 139 | record_class = self._get_record_class(lines[1]) 140 | sql, doc = self._get_sql_doc(lines[2 if record_class else 1 :]) 141 | if re.search("(?s)^[\t\n\r ;]*$", sql): 142 | raise SQLParseException(f"empty sql for: {qname} at {floc[0]}:{floc[1]}") 143 | signature = self._build_signature(sql, qname, qsig) 144 | query_fqn = ".".join(ns_parts + [qname]) 145 | if self.attribute: # :u.a -> :u__a, **after** signature generation 146 | sql, attributes = _preprocess_object_attributes(self.attribute, sql) 147 | else: # pragma: no cover 148 | attributes = None 149 | sql = self.driver_adapter.process_sql(query_fqn, qop, sql) 150 | return QueryDatum(query_fqn, doc, qop, sql, record_class, signature, floc, 
attributes, qsig) 151 | 152 | def _get_name_op(self, text: str) -> Tuple[str, SQLOperationType, Optional[List[str]]]: 153 | """Extract name, parameters and operation from spec.""" 154 | qname_spec = text.replace("-", "_") 155 | matched = _NAME_OP.match(qname_spec) 156 | if not matched or _BAD_PREFIX.match(qname_spec): 157 | raise SQLParseException(f'invalid query name and operation spec: "{qname_spec}"') 158 | nameop = matched.groupdict() 159 | params, rawparams = None, nameop["params"] 160 | if rawparams is not None: 161 | params = [p.strip() for p in rawparams.split(",")] 162 | if params == ['']: # handle "( )" 163 | params = [] 164 | operation = _OP_TYPES[nameop["op"]] 165 | if params and operation == "#": # pragma: no cover # FIXME it is covered? 166 | raise SQLParseException(f'cannot use named parameters in SQL script: "{qname_spec}"') 167 | return nameop["name"], operation, params 168 | 169 | def _get_record_class(self, text: str) -> Optional[Type]: 170 | """Extract record class from spec.""" 171 | rc_match = _RECORD_DEF.match(text) 172 | rc_name = rc_match.group(1) if rc_match else None 173 | # TODO: Probably will want this to be a class, marshal in, and marshal out 174 | return self.record_classes.get(rc_name) if isinstance(rc_name, str) else None 175 | 176 | def _get_sql_doc(self, lines: Sequence[str]) -> Tuple[str, str]: 177 | """Separate SQL-comment documentation and SQL code.""" 178 | doc, sql = "", "" 179 | for line in lines: 180 | doc_match = _SQL_COMMENT.match(line) 181 | if doc_match: 182 | doc += doc_match.group(1) + "\n" 183 | else: 184 | sql += line + "\n" 185 | 186 | return sql.strip(), doc.rstrip() 187 | 188 | def _build_signature(self, sql: str, qname: str, sig: Optional[List[str]]) -> inspect.Signature: 189 | """Return signature object for generated dynamic function.""" 190 | # FIXME what about the connection?! 
191 | params = [inspect.Parameter("self", inspect.Parameter.POSITIONAL_OR_KEYWORD)] 192 | names = set() 193 | for match in VAR_REF.finditer(sql): 194 | gd = match.groupdict() 195 | if gd["squote"] or gd["dquote"]: 196 | continue 197 | name = gd["var_name"] 198 | if name.isdigit() or name in names: 199 | continue 200 | if sig is not None: # optional parameter declarations 201 | if name not in sig: 202 | raise SQLParseException(f"undeclared parameter name in query {qname}: {name}") 203 | names.add(name) 204 | params.append( 205 | inspect.Parameter( 206 | name=name, 207 | kind=inspect.Parameter.KEYWORD_ONLY, 208 | ) 209 | ) 210 | if sig is not None and len(sig) != len(names): 211 | unused = sorted(n for n in sig if n not in names) 212 | raise SQLParseException(f"unused declared parameter in query {qname}: {unused}") 213 | return inspect.Signature(parameters=params) 214 | 215 | def load_query_data_from_sql( 216 | self, sql: str, ns_parts: List[str], fname: Union[Path, str] = "" 217 | ) -> List[QueryDatum]: 218 | """Load queries from a string.""" 219 | usql = _remove_ml_comments(sql) 220 | qdefs = _QUERY_DEF.split(usql) 221 | # FIXME lineno is from the uncommented file 222 | lineno = 1 + qdefs[0].count("\n") 223 | data = [] 224 | # first item is anything before the first query definition, drop it! 
225 | for qdef in qdefs[1:]: 226 | data.append(self._make_query_datum(qdef, ns_parts, (fname, lineno))) 227 | lineno += qdef.count("\n") 228 | return data 229 | 230 | def load_query_data_from_file( 231 | self, path: Path, ns_parts: List[str] = [], encoding=None 232 | ) -> List[QueryDatum]: 233 | """Load queries from a file.""" 234 | return self.load_query_data_from_sql(path.read_text(encoding=encoding), ns_parts, path) 235 | 236 | def load_query_data_from_dir_path( 237 | self, dir_path, ext=(".sql",), encoding=None 238 | ) -> QueryDataTree: 239 | """Load queries from a directory.""" 240 | if not dir_path.is_dir(): 241 | raise ValueError(f"The path {dir_path} must be a directory") 242 | 243 | def _recurse_load_query_data_tree(path, ns_parts=[], ext=(".sql",), encoding=None): 244 | query_data_tree = {} 245 | for p in path.iterdir(): 246 | if p.is_file(): 247 | if p.suffix not in ext: 248 | continue 249 | for query_datum in self.load_query_data_from_file( 250 | p, ns_parts, encoding=encoding 251 | ): 252 | query_data_tree[query_datum.query_name] = query_datum 253 | elif p.is_dir(): 254 | query_data_tree[p.name] = _recurse_load_query_data_tree( 255 | p, ns_parts + [p.name], ext=ext, encoding=encoding 256 | ) 257 | else: # pragma: no cover 258 | # This should be practically unreachable. 
259 | raise SQLLoadException(f"The path must be a directory or file, got {p}") 260 | return query_data_tree 261 | 262 | return _recurse_load_query_data_tree(dir_path, ext=ext, encoding=encoding) 263 | -------------------------------------------------------------------------------- /aiosql/types.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from enum import Enum 3 | from pathlib import Path 4 | from typing import ( 5 | Any, 6 | AsyncContextManager, 7 | Callable, 8 | ContextManager, 9 | Dict, 10 | Generator, 11 | List, 12 | NamedTuple, 13 | Optional, 14 | Tuple, 15 | Union, 16 | ) 17 | from typing import Protocol 18 | 19 | # FIXME None added for MySQL buggy drivers 20 | ParamType = Union[Dict[str, Any], List[Any], None] 21 | 22 | 23 | class SQLOperationType(Enum): 24 | """Enumeration of aiosql operation types.""" 25 | 26 | INSERT_RETURNING = 0 27 | INSERT_UPDATE_DELETE = 1 28 | INSERT_UPDATE_DELETE_MANY = 2 29 | SCRIPT = 3 30 | SELECT = 4 31 | SELECT_ONE = 5 32 | SELECT_VALUE = 6 33 | 34 | 35 | class QueryDatum(NamedTuple): 36 | query_name: str 37 | doc_comments: str 38 | operation_type: SQLOperationType 39 | sql: str 40 | record_class: Any 41 | signature: Optional[inspect.Signature] 42 | floc: Tuple[Union[Path, str], int] 43 | attributes: Optional[Dict[str, Dict[str, str]]] 44 | parameters: Optional[List[str]] 45 | 46 | 47 | class QueryFn(Protocol): 48 | __name__: str 49 | __signature__: Optional[inspect.Signature] 50 | sql: str 51 | operation: SQLOperationType 52 | attributes: Optional[Dict[str, Dict[str, str]]] 53 | parameters: Optional[List[str]] 54 | 55 | def __call__(self, *args: Any, **kwargs: Any) -> Any: ... 
# pragma: no cover 56 | 57 | 58 | # Can't make this a recursive type in terms of itself 59 | # QueryDataTree = Dict[str, Union[QueryDatum, 'QueryDataTree']] 60 | QueryDataTree = Dict[str, Union[QueryDatum, Dict]] 61 | 62 | 63 | class SyncDriverAdapterProtocol(Protocol): 64 | def process_sql( 65 | self, query_name: str, op_type: SQLOperationType, sql: str 66 | ) -> str: ... # pragma: no cover 67 | 68 | def select( 69 | self, 70 | conn: Any, 71 | query_name: str, 72 | sql: str, 73 | parameters: ParamType, 74 | record_class: Optional[Callable], 75 | ) -> Generator[Any, None, None]: ... # pragma: no cover 76 | 77 | def select_one( 78 | self, 79 | conn: Any, 80 | query_name: str, 81 | sql: str, 82 | parameters: ParamType, 83 | record_class: Optional[Callable], 84 | ) -> Optional[Tuple[Any, ...]]: ... # pragma: no cover 85 | 86 | def select_value( 87 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 88 | ) -> Optional[Any]: ... # pragma: no cover 89 | 90 | def select_cursor( 91 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 92 | ) -> ContextManager[Any]: ... # pragma: no cover 93 | 94 | def insert_update_delete( 95 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 96 | ) -> int: ... # pragma: no cover 97 | 98 | def insert_update_delete_many( 99 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 100 | ) -> int: ... # pragma: no cover 101 | 102 | def insert_returning( 103 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 104 | ) -> Optional[Any]: ... # pragma: no cover 105 | 106 | def execute_script(self, conn: Any, sql: str) -> str: ... # pragma: no cover 107 | 108 | 109 | class AsyncDriverAdapterProtocol(Protocol): 110 | def process_sql( 111 | self, query_name: str, op_type: SQLOperationType, sql: str 112 | ) -> str: ... 
# pragma: no cover 113 | 114 | async def select( 115 | self, 116 | conn: Any, 117 | query_name: str, 118 | sql: str, 119 | parameters: ParamType, 120 | record_class: Optional[Callable], 121 | ) -> List: ... # pragma: no cover 122 | 123 | async def select_one( 124 | self, 125 | conn: Any, 126 | query_name: str, 127 | sql: str, 128 | parameters: ParamType, 129 | record_class: Optional[Callable], 130 | ) -> Optional[Any]: ... # pragma: no cover 131 | 132 | async def select_value( 133 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 134 | ) -> Optional[Any]: ... # pragma: no cover 135 | 136 | async def select_cursor( 137 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 138 | ) -> AsyncContextManager[Any]: ... # pragma: no cover 139 | 140 | # TODO: Next major version introduce a return? Optional return? 141 | async def insert_update_delete( 142 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 143 | ) -> None: ... # pragma: no cover 144 | 145 | # TODO: Next major version introduce a return? Optional return? 146 | async def insert_update_delete_many( 147 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 148 | ) -> None: ... # pragma: no cover 149 | 150 | async def insert_returning( 151 | self, conn: Any, query_name: str, sql: str, parameters: ParamType 152 | ) -> Optional[Any]: ... # pragma: no cover 153 | 154 | async def execute_script(self, conn: Any, sql: str) -> str: ... # pragma: no cover 155 | 156 | 157 | DriverAdapterProtocol = Union[SyncDriverAdapterProtocol, AsyncDriverAdapterProtocol] 158 | -------------------------------------------------------------------------------- /aiosql/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | import logging 3 | 4 | # FIXME to be improved 5 | VAR_REF = re.compile( 6 | # NOTE probably pg specific? 
7 | r'(?P<dquote>"(""|[^"])+")|' 8 | # FIXME mysql/mariadb use backslash escapes 9 | r"(?P<squote>\'(\'\'|[^\'])*\')|" 10 | # NOTE beware of overlapping re 11 | r"(?P<lead>[^:]):(?P<var_name>\w+)(?=[^:]?)" 12 | ) 13 | """Pattern to identify colon-variables (aka _named_ style) in SQL code""" 14 | 15 | # NOTE see comments above 16 | VAR_REF_DOT = re.compile( 17 | r'(?P<dquote>"(""|[^"])+")|' 18 | r"(?P<squote>\'(\'\'|[^\'])*\')|" 19 | r"(?P<lead>[^:]):(?P<var_name>\w+\.\w+)(?=[^:]?)" 20 | ) 21 | """Pattern to identify colon-variables with a simple attribute in SQL code.""" 22 | 23 | log = logging.getLogger("aiosql") 24 | """Shared package logging.""" 25 | # log.setLevel(logging.DEBUG) 26 | 27 | 28 | class SQLLoadException(Exception): 29 | """Raised when there is a problem loading SQL content from a file or directory""" 30 | 31 | pass 32 | 33 | 34 | class SQLParseException(Exception): 35 | """Raised when there was a problem parsing the aiosql comment annotations in SQL""" 36 | 37 | pass 38 | -------------------------------------------------------------------------------- /docker/.gitignore: -------------------------------------------------------------------------------- 1 | .docker-aiosql-* 2 | -------------------------------------------------------------------------------- /docker/Makefile: -------------------------------------------------------------------------------- 1 | SHELL = /bin/bash 2 | .ONESHELL: 3 | 4 | DOCKER = docker 5 | NAME = aiosql-tests 6 | 7 | clean: 8 | TEST=pytest $(DOCKER) compose down -v 9 | $(RM) .docker-aiosql-* 10 | 11 | # 12 | # Build docker client images 13 | # 14 | # NOTE it really depends on the base image version and 15 | # when it is run to know which are the latest packages 16 | # --no-cache? docker build prune? 17 | CACHE = 18 | # CACHE = --no-cache 19 | 20 | .docker-aiosql-%: dockerfile.python-% 21 | tag=$@ 22 | tag=$${tag#.docker-aiosql-} 23 | tag=python-aiosql-$$tag 24 | $(DOCKER) build $(CACHE) -t $$tag -f $< . 
&& touch $@ 25 | 26 | .PHONY: docker.aiosql 27 | docker.aiosql: \ 28 | .docker-aiosql-postgres \ 29 | .docker-aiosql-mysql \ 30 | .docker-aiosql-dbs 31 | 32 | # 33 | # Run tests with docker 34 | # 35 | .PHONY: docker.run 36 | docker.run: 37 | [ "$$TEST" ] || { echo "# Missing \$$TEST" >&2 ; exit 1 ; } 38 | echo "# starting up…" 39 | $(DOCKER) compose up -d 40 | # wait and show results, in probable completion order 41 | # $(DOCKER) container logs -f $(NAME)-dbs-client-1 42 | $(DOCKER) container wait $(NAME)-dbs-client-1 43 | $(DOCKER) container logs $(NAME)-dbs-client-1 44 | # $(DOCKER) container logs -f $(NAME)-my-client-1 45 | $(DOCKER) container wait $(NAME)-my-client-1 46 | $(DOCKER) container logs $(NAME)-my-client-1 47 | # $(DOCKER) container logs -f $(NAME)-pg-client-1 48 | $(DOCKER) container wait $(NAME)-pg-client-1 49 | $(DOCKER) container logs $(NAME)-pg-client-1 50 | echo "# shutting down…" 51 | $(DOCKER) compose down -v 52 | 53 | .PHONY: docker.pytest 54 | docker.pytest: 55 | export TEST=pytest 56 | $(MAKE) docker.run 57 | 58 | .PHONY: docker.coverage 59 | docker.coverage: 60 | export TEST=coverage 61 | $(MAKE) docker.run 62 | -------------------------------------------------------------------------------- /docker/README.md: -------------------------------------------------------------------------------- 1 | # AioSQL Docker Tests 2 | 3 | As MySQL and MariaDB cannot be installed one alongside the other easily, 4 | this directory provides a docker solution with 3 servers (for postgres, 5 | mysql and mariadb) and their clients. Tests with databases sqlite3 and duckdb 6 | are run with mariadb because it has the lowest load. 7 | 8 | ## Servers 9 | 10 | They rely on the official images for `postgres`, `mysql` and `mariadb`. 11 | 12 | ## Clients 13 | 14 | They are built on top of `ubuntu` because using the official `python` 15 | image could not be made to work for all 5 databases. 16 | See docker specifications in `dockerfile.python-*`. 
17 | 18 | ## Makefile 19 | 20 | Run docker compose for `pytest` or `coverage`. 21 | 22 | ```shell 23 | # get/update docker images 24 | docker image pull postgres 25 | docker image pull mariadb 26 | docker image pull mysql 27 | docker image pull ubuntu 28 | # generate client images 29 | make docker.aiosql 30 | # run tests in .. 31 | make docker.pytest 32 | make docker.coverage 33 | ``` 34 | 35 | ## Miscellaneous Commands 36 | 37 | Run a client with access to host: 38 | 39 | ```sh 40 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway some-image bash 41 | ``` 42 | 43 | Build an image: 44 | 45 | ```sh 46 | docker build -t aiosql-python-mysql -f dockerfile.python-mysql . 47 | ``` 48 | 49 | Run docker clients against manually started docker servers: 50 | 51 | ```sh 52 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway \ 53 | python-aiosql-dbs \ 54 | make VENV=/venv MA_HOST=host.docker.internal check.pytest.mariadb.detached 55 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway \ 56 | python-aiosql-mysql \ 57 | make VENV=/venv MY_HOST=host.docker.internal check.pytest.mysql.detached 58 | docker run -it -v .:/code --add-host=host.docker.internal:host-gateway \ 59 | python-aiosql-dbs \ 60 | make VENV=/venv MS_HOST=host.docker.internal check.pytest.mssql.detached 61 | ``` 62 | 63 | ## MS SQL Server 64 | 65 | See [ubuntu image](https://hub.docker.com/r/microsoft/mssql-server) and its associated 66 | [documentation](https://learn.microsoft.com/en-us/sql/linux/sql-server-linux-configure-environment-variables) 67 | 68 | ```sh 69 | docker pull mcr.microsoft.com/mssql/server:2022-latest 70 | docker run -e "ACCEPT_EULA=Y" -e "MSSQL_SA_PASSWORD=Abc123.." -e "MSSQL_PID=Developer" \ 71 | -p 1433:1433 --name mssqltest --hostname mssqltest -d mcr.microsoft.com/mssql/server:2022-latest 72 | docker exec -it mssqltest /opt/mssql-tools18/bin/sqlcmd -C -S localhost -U sa -P "Abc123.." 
73 | # type a command 74 | # go 75 | ``` 76 | -------------------------------------------------------------------------------- /docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Docker containers to test aiosql against various databases 3 | # 4 | # TEST is either "pytest" or "coverage" 5 | # 6 | 7 | name: aiosql-tests 8 | 9 | services: 10 | # 11 | # postgres tests 12 | # 13 | postgres-server: 14 | image: postgres 15 | environment: 16 | - POSTGRES_DB=pytest 17 | - POSTGRES_USER=pytest 18 | - POSTGRES_PASSWORD=pytest 19 | pg-client: 20 | image: python-aiosql-postgres 21 | volumes: 22 | - ..:/code 23 | depends_on: 24 | - postgres-server 25 | environment: 26 | - PG_HOST=postgres-server 27 | - PG_PORT=5432 28 | command: make VENV=/venv check.$TEST.postgres.detached 29 | # 30 | # mysql tests 31 | # 32 | mysql-server: 33 | image: mysql 34 | environment: 35 | - MYSQL_ROOT_PASSWORD=pytest 36 | - MYSQL_USER=pytest 37 | - MYSQL_PASSWORD=pytest 38 | - MYSQL_DATABASE=pytest 39 | my-client: 40 | image: python-aiosql-mysql 41 | depends_on: 42 | - mysql-server 43 | volumes: 44 | - ..:/code 45 | environment: 46 | - MY_HOST=mysql-server 47 | - MY_PORT=3306 48 | command: make VENV=/venv check.$TEST.mysql.detached 49 | # 50 | # other tests 51 | # 52 | mariadb-server: 53 | image: mariadb 54 | environment: 55 | - MYSQL_ROOT_PASSWORD=pytest 56 | - MYSQL_USER=pytest 57 | - MYSQL_PASSWORD=pytest 58 | - MYSQL_DATABASE=pytest 59 | mssql-server: 60 | image: mcr.microsoft.com/mssql/server:2022-latest 61 | environment: 62 | - ACCEPT_EULA=Y 63 | - MSSQL_TCP_PORT=1433 64 | - MSSQL_SA_PASSWORD=Abc123.. 65 | - MSSQL_PID=Developer 66 | dbs-client: 67 | image: python-aiosql-dbs 68 | depends_on: 69 | - mariadb-server 70 | - mssql-server 71 | volumes: 72 | - ..:/code 73 | environment: 74 | - MA_HOST=mariadb-server 75 | - MA_PORT=3306 76 | - MS_HOST=mssql-server 77 | - MS_PORT=1433 78 | - MS_PASS=Abc123.. 
79 | command: > 80 | make VENV=/venv check.$TEST.mariadb.detached check.$TEST.mssql.detached check.$TEST.misc 81 | -------------------------------------------------------------------------------- /docker/dockerfile.python-dbs: -------------------------------------------------------------------------------- 1 | FROM ubuntu 2 | LABEL description="Python setup for AioSQL MariaDB, SQLite, DuckDB and MS SQL Server testing" 3 | RUN apt update 4 | RUN apt install -y --no-install-recommends \ 5 | python-is-python3 python3-venv python3-dev gcc make curl pkg-config 6 | RUN python -m venv /venv 7 | RUN /venv/bin/pip install asyncio pytest pytest-asyncio coverage 8 | WORKDIR /code 9 | # mariadb specific packages 10 | RUN curl -sS https://downloads.mariadb.com/MariaDB/mariadb_repo_setup | bash 11 | RUN apt install -y --no-install-recommends libmariadb-dev sqlite3 12 | RUN apt clean 13 | # manual fix for pytest-mysql dependency installation which looks for "mariadb" instead of "libmariadb" 14 | RUN cp /usr/lib/x86_64-linux-gnu/pkgconfig/libmariadb.pc /usr/lib/x86_64-linux-gnu/pkgconfig/mariadb.pc 15 | # drivers 16 | RUN /venv/bin/pip install mariadb pytest-mysql duckdb apsw aiosqlite pymssql 17 | -------------------------------------------------------------------------------- /docker/dockerfile.python-mysql: -------------------------------------------------------------------------------- 1 | FROM ubuntu 2 | LABEL description="Python setup for AioSQL MySQL testing" 3 | RUN apt update 4 | RUN apt install -y --no-install-recommends \ 5 | python-is-python3 python3-venv python3-dev gcc make curl pkg-config 6 | RUN python -m venv /venv 7 | RUN /venv/bin/pip install asyncio pytest pytest-asyncio coverage 8 | WORKDIR /code 9 | # mysql specific packages 10 | RUN apt install -y --no-install-recommends libmysqlclient-dev 11 | RUN apt clean 12 | RUN /venv/bin/pip install cryptography mysqlclient mysql-connector-python pymysql pytest-mysql 13 | 
-------------------------------------------------------------------------------- /docker/dockerfile.python-postgres: -------------------------------------------------------------------------------- 1 | FROM ubuntu 2 | LABEL description="Python setup for AioSQL Postgres testing" 3 | RUN apt update 4 | RUN apt install -y --no-install-recommends \ 5 | python-is-python3 python3-venv python3-dev gcc make curl pkg-config 6 | RUN python -m venv /venv 7 | RUN /venv/bin/pip install asyncio pytest pytest-asyncio coverage 8 | WORKDIR /code 9 | # postgres specific packages 10 | RUN apt install -y --no-install-recommends libpq-dev 11 | RUN apt clean 12 | RUN /venv/bin/pip install psycopg psycopg2 pygresql pg8000 asyncpg pytest-postgresql 13 | -------------------------------------------------------------------------------- /docs/source/advanced-topics.rst: -------------------------------------------------------------------------------- 1 | Advanced Topics 2 | =============== 3 | 4 | Accessing the ``cursor`` object 5 | ------------------------------- 6 | 7 | The cursor is a temporary object created in memory that allows you to perform 8 | row-by-row operations on your data and use handy methods such as 9 | ``.description``, ``.fetchall()`` and ``.fetchone()``. 10 | As long as you are running a SQL ``SELECT`` query, you can access the cursor 11 | object by appending ``_cursor`` to the end of the queries name. 12 | 13 | For example, say you have the following query named ``get-all-greetings`` in a ``sql`` file: 14 | 15 | .. literalinclude:: ../../example/greetings.sql 16 | :language: sql 17 | :lines: 1-5 18 | 19 | With this query, you can get all ``greeting_id``'s and ``greeting``'s, access 20 | the cursor object, and print the column names with the following code: 21 | 22 | .. 
literalinclude:: ../../example/greetings_cursor.py 23 | :language: python 24 | 25 | Accessing prepared SQL as a string 26 | ---------------------------------- 27 | 28 | When you need to do something not directly supported by aiosql, this is your 29 | escape hatch. 30 | You can still define your SQL in a file and load it with aiosql, but then you 31 | may choose to use it without calling your aiosql method. 32 | The prepared SQL string of a method is available as an attribute of each method 33 | ``queries..sql``. 34 | Here's an example of how you might use it with a unique feature of ``psycopg2`` like 35 | `execute_values `__. 36 | 37 | .. literalinclude:: ../../example/pg_execute_values.py 38 | :language: python 39 | 40 | Accessing the SQL Operation Type 41 | -------------------------------- 42 | 43 | Query functions also provide access to the SQL operation type you define in 44 | your library. 45 | This can be useful for observability (such as metrics, tracing, or logging), or 46 | customizing how you manage different operations within your codebase. Extending 47 | from the above example: 48 | 49 | .. literalinclude:: ../../example/observe_query.py 50 | :language: python 51 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = "aiosql" 21 | copyright = "2018-2025, William Vaughn et alii." 22 | author = "William Vaughn " 23 | github_doc_root = "https://github.com/nackjicholson/aiosql/tree/master/docs/source/" 24 | 25 | # The full version, including alpha/beta/rc tags 26 | from importlib.metadata import version as pkg_version 27 | release = pkg_version("aiosql") 28 | 29 | # -- General configuration --------------------------------------------------- 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = ["sphinx_rtd_theme", "sphinx.ext.autodoc", "sphinx.ext.githubpages"] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | # templates_path = ["_templates"] 38 | 39 | # The language for content autogenerated by Sphinx. Refer to documentation 40 | # for a list of supported languages. 41 | # 42 | # This is also used if you do content translation via gettext catalogs. 43 | # Usually you set "language" from the command line for these cases. 44 | language = "en" 45 | 46 | # List of patterns, relative to source directory, that match files and 47 | # directories to ignore when looking for source files. 48 | # This pattern also affects html_static_path and html_extra_path. 49 | exclude_patterns = [] # type: ignore 50 | 51 | 52 | # -- Options for HTML output ------------------------------------------------- 53 | 54 | # The theme to use for HTML and HTML Help pages. See the documentation for 55 | # a list of builtin themes. 56 | # 57 | html_theme = "sphinx_rtd_theme" 58 | 59 | # Add any paths that contain custom static files (such as style sheets) here, 60 | # relative to this directory. 
They are copied after the builtin static files, 61 | # so a file named "default.css" will overwrite the builtin "default.css". 62 | # html_static_path = ["_static"] 63 | 64 | 65 | # -- Extension configuration ------------------------------------------------- 66 | -------------------------------------------------------------------------------- /docs/source/contributing.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | First, thank you for considering to make a contribution to this project. 5 | Spending your valuable time helping make this project better is deeply appreciated. 6 | All kinds of contributions are helpful and welcome. 7 | 8 | - Report issues ``__ 9 | - Review or make your own pull requests ``__ 10 | - Write documentation ``__ 11 | 12 | Whether you have an idea for a feature improvement or have found a troubling bug, thank you for being here. 13 | 14 | 15 | Packaging & Distribution 16 | ------------------------ 17 | 18 | This aiosql repository uses the Python standard packaging tools. 19 | Read about them in more detail at the following links. 20 | 21 | - `Python Packaging User Guide `__ 22 | - `PyPA - Packaging & Distributing projects `__ 23 | - `setuptools `__ 24 | - `build `__ 25 | - `twine `__ 26 | 27 | Development Setup 28 | ----------------- 29 | 30 | 1. Create a virtual environment 31 | 32 | .. code:: sh 33 | 34 | # get the project sources 35 | git clone git@github.com:nackjicholson/aiosql.git 36 | cd aiosql 37 | # create a venv manually 38 | python -m venv venv 39 | source venv/bin/activate 40 | pip install --upgrade pip 41 | 42 | All subsequent steps will assume you are using python within your activated virtual environment. 43 | 44 | 1. Install the development dependencies 45 | 46 | As a development library, aiosql is expected to work with all supported 47 | versions of Python, and many drivers. 48 | The bare minimum of version pinning is declared in the dependencies. 
49 | 50 | .. code:: sh 51 | 52 | # development tools 53 | pip install .[dev] 54 | # per-database stuff 55 | pip install .[dev-sqlite] 56 | pip install .[dev-postgres] 57 | pip install .[dev-duckdb] 58 | pip install .[dev-mysql] 59 | pip install .[dev-mariadb] 60 | 61 | 1. Run tests 62 | 63 | .. code:: sh 64 | 65 | pytest 66 | 67 | Alternatively, there is a convenient ``Makefile`` to automate the above tasks: 68 | 69 | .. code:: sh 70 | 71 | make venv.dev # install dev virtual environment 72 | source venv/bin/activate 73 | make check # run all checks: pytest, flake8, coverage… 74 | 75 | Also, there is a working ``poetry`` setup in ``pyproject.toml``. 76 | 77 | Dependency Management 78 | --------------------- 79 | 80 | There is no dependency for using ``aiosql`` other than installing your 81 | driver of choice. 82 | 83 | For development you need to test with various databases and even more drivers, 84 | see above for generating a working python environment. 85 | 86 | See also the ``docker`` sub-directory which contains dockerfiles for testing 87 | with Postgres, MySQL, MariaDB and MS SQL Server. 88 | -------------------------------------------------------------------------------- /docs/source/database-driver-adapters.rst: -------------------------------------------------------------------------------- 1 | Database Driver Adapters 2 | ======================== 3 | 4 | Database driver adapters in aiosql allow extension of the library to support 5 | additional database drivers. 6 | If you are using a driver other than the ones currently supported by built-in 7 | driver adapters (``sqlite3``, ``apsw``, ``aiosqlite``, ``psycopg``, 8 | ``psycopg2``, ``pg8000``, ``pygresql``, ``asyncpg``, ``pymysql``, 9 | ``mysqlclient``, ``mysql-connector``, ``duckdb``, ``pymssql``), 10 | first check whether your driver supports *pyformat* or *named* paramstyles. 11 | If so, check (manually) whether the default PEP 249 drivers work: 12 | 13 | .. 
code:: python 14 | 15 | import acmedb # your PEP 249 driver 16 | import aiosql 17 | 18 | conn = acmedb.connect("…") 19 | queries = aiosql.from_str("-- name: add42$\nSELECT :n + 42;\n", acmedb) 20 | assert queries.add42(conn, n=18) == 60 21 | 22 | If this simplistic test works, do more tests involving all operators (see the 23 | pytest tests), then create an issue to notify that your driver works out of the 24 | box so it can be advertised from the readme. 25 | 26 | If it does not work or if you have an asynchronous driver, you will need to make 27 | your own. 28 | Good news, it should be very close to the existing supported drivers. 29 | A database driver adapter is a duck-typed class that follows either of the 30 | ``Protocol`` types defined in 31 | `aiosql/types.py `__: 32 | 33 | .. literalinclude:: ../../aiosql/types.py 34 | :language: python 35 | :lines: 61-104 36 | :caption: PEP 249 Synchronous Adapter 37 | 38 | .. literalinclude:: ../../aiosql/types.py 39 | :language: python 40 | :lines: 107-152 41 | :caption: Asynchronous Adapter 42 | 43 | Some comments about these classes, one for synchronous queries (PEP 249) and 44 | the other for asynchronous queries: 45 | 46 | - ``_cursor`` is an internal method to generate a cursor, as some drivers 47 | need to pass parameters at this phase. 48 | - ``process_sql`` is used to preprocess SQL queries so has to handle named 49 | parameters as they are managed by the target driver. 50 | - ``select``, ``select_one``, ``insert_update_delete``, ``insert_update_delete_many``, 51 | ``insert_returning`` and ``execute_script`` implement all operations. 52 | - ``select_cursor`` returns the raw cursor from a ``select``. 53 | 54 | There isn't much difference between these two protocols besides the 55 | ``async def`` syntax for the method definition. 
56 | There is one more sneaky difference, the aiosql code expects async adapters to 57 | have a static class field ``is_aio_driver = True`` so it can tell when to use 58 | ``await`` for method returns. 59 | Looking at the source of the builtin 60 | `adapters/ `__ 61 | is a great place to start seeing how you may write your own database driver adapter. 62 | 63 | For a PEP 249 driver, consider inheriting from ``aiosql.adapters.Generic`` if you can. 64 | 65 | To use the adapter pass its constructor or factory as the ``driver_adapter`` 66 | argument when building Queries: 67 | 68 | .. code:: python 69 | 70 | queries = aiosql.from_path("foo.sql", driver_adapter=AcmeAdapter) 71 | 72 | Alternatively, an adapter can be registered or overridden: 73 | 74 | .. code:: python 75 | 76 | # in AcmeAdapter provider, eg module "acmedb_aiosql" 77 | import aiosql 78 | aiosql.register_adapter("acmedb", AcmeAdapter) 79 | 80 | # then use it elsewhere 81 | import aiosql 82 | queries = aiosql.from_path("some.sql", "acmedb") 83 | 84 | Please ask questions on `GitHub Issues `__. 85 | If the community makes additional adapter add-ons they will be listed in the doc. -------------------------------------------------------------------------------- /docs/source/defining-sql-queries.rst: -------------------------------------------------------------------------------- 1 | Defining SQL Queries 2 | ==================== 3 | 4 | Query Names 5 | ----------- 6 | 7 | Name definitions are how aiosql determines the name of the methods that SQL 8 | code blocks are accessible by. 9 | A query name is defined by a SQL comment of the form ``"-- name: "``. 10 | As a readability convenience, dash characters (``-``) in the name are turned 11 | into underscores (``_``). 12 | 13 | ..
literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 14 | :language: sql 15 | :lines: 14,16 16 | 17 | This query will be available in aiosql under the python method name ``.get_all_blogs(conn)`` 18 | 19 | Query Comments 20 | -------------- 21 | 22 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 23 | :language: sql 24 | :lines: 14-16 25 | 26 | Any other SQL comments you make between the name definition and your code will 27 | be used as the python documentation string for the generated method. 28 | You can use ``help()`` in the Python REPL to view these comments while using python. 29 | 30 | .. 31 | FIXME method parameters are not shown… 32 | 33 | .. code:: pycon 34 | 35 | Python 3 … on Linux 36 | Type "help", "copyright", "credits" or "license" for more information. 37 | >>> import aiosql 38 | >>> queries = aiosql.from_path("blogs.sql", "sqlite3") 39 | >>> help(queries.get_all_blogs) 40 | Help on method get_all_blogs in module aiosql.queries: 41 | 42 | get_all_blogs(conn, *args, **kwargs) method of aiosql.queries.Queries instance 43 | Fetch all fields for every blog in the database. 44 | 45 | Named Parameters 46 | ---------------- 47 | 48 | Named parameters ``:param`` are accepted by all supported drivers and taken 49 | from Python named parameters passed to the query. 50 | In addition, simple attributes can be referenced with the ``.``-syntax. 51 | 52 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 53 | :language: sql 54 | :lines: 61-62 55 | 56 | Then the generated function expects two named parameters: 57 | 58 | .. code:: python 59 | 60 | res = queries.with_params(name="Calvin", x=(1+1j)) 61 | # => (6, 2.0) 62 | 63 | Parameter Declarations 64 | ---------------------- 65 | 66 | Query parameter names may be declared in parentheses just after the method name. 67 | 68 | ..
literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 69 | :language: sql 70 | :lines: 55,56 71 | 72 | When declared they are checked, raising errors when parameters are unused or undeclared. 73 | 74 | Operators 75 | --------- 76 | 77 | This section describes the usage of various query operator symbols that you can 78 | annotate query names with in order to direct how aiosql will execute and return 79 | results. 80 | 81 | No Operator (Default) 82 | ~~~~~~~~~~~~~~~~~~~~~ 83 | 84 | In the above `Query Names <#query-names>`__ section the ``get-all-blogs`` 85 | name is written without any trailing operators. 86 | 87 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 88 | :language: sql 89 | :lines: 14 90 | 91 | The lack of an explicit operator tells aiosql to execute the query and 92 | to return **all** the results. 93 | In the case of ``get-all-blogs`` that means a ``select`` statement will be 94 | executed and all the resulting rows will be returned. 95 | When writing your application you will often need to perform other operations 96 | besides ``select``, like ``insert``, ``delete``, and perhaps bulk operations. 97 | The operators detailed in the next sections let you declare in your SQL code 98 | how that query should be executed by a Python database driver. 99 | 100 | ``^`` Select One 101 | ~~~~~~~~~~~~~~~~ 102 | 103 | The ``^`` operator executes a query and returns the **first row** of a result set. 104 | When there are no rows in the result set it returns ``None``. 105 | This is useful when you know there should be one, and exactly one result from your query. 106 | 107 | As an example, if you have a unique constraint on the ``username`` field in your 108 | ``users`` table which makes it impossible for two users to share the same username, 109 | you could use ``^`` to direct aiosql to select a single user rather than a list of 110 | rows of length 1. 111 | 112 | .. 
literalinclude:: ../../tests/blogdb/sql/users/users.sql 113 | :language: sql 114 | :lines: 8-14 115 | 116 | When used from Python this query will either return ``None`` or the singular selected row. 117 | 118 | .. code:: python 119 | 120 | queries.get_user_by_username(conn, username="willvaughn") 121 | # => (1, "willvaughn", "William Vaughn") or None 122 | 123 | ``$`` Select Value 124 | ~~~~~~~~~~~~~~~~~~ 125 | 126 | The ``$`` operator will execute the query, and only return the **first value of the first row** 127 | of a result set. If there are no rows in the result set it returns ``None``. 128 | This is implemented by returning the first element of the tuple returned by ``cur.fetchone()`` 129 | from the underlying driver. 130 | This is mostly useful for queries returning IDs, COUNTs or other aggregates. 131 | 132 | .. literalinclude:: ../../tests/blogdb/sql/users/users.sql 133 | :language: sql 134 | :lines: 30,32 135 | 136 | When used from Python: 137 | 138 | .. code:: python 139 | 140 | queries.get_count(conn) 141 | # => 3 or None 142 | 143 | ``!`` Insert/Update/Delete 144 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 145 | 146 | The ``!`` operator executes SQL without returning any results. 147 | It is meant for statements that use ``insert``, ``update``, and ``delete`` to make 148 | modifications to database rows without a necessary return value. 149 | 150 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 151 | :language: sql 152 | :lines: 64-66,32,34 153 | 154 | The methods generated are: 155 | 156 | .. code:: python 157 | 158 | def new_blog(conn, userid: int, title: str, content: str) -> int: 159 | pass 160 | 161 | def remove_blog(conn, blogid: int) -> int: 162 | pass 163 | 164 | Each can be called to alter the database, and returns the number of affected rows 165 | if available. 166 | 167 | Note that some SQL databases allow returning a relation after ``insert``, 168 | ``update`` or ``delete`` by using a ``returning`` clause.
169 | For such queries the result is a relation like a ``select``, so the same operators 170 | apply: 171 | 172 | .. literalinclude:: ../../tests/blogdb/sql/blogs/blogs.sql 173 | :language: sql 174 | :lines: 68-71 175 | 176 | .. code:: python 177 | 178 | blogid = queries.publish_new_blog(conn, userid=1, title="AioSQL New Features", content="…") 179 | 180 | ```__. 190 | 191 | As recent versions of SQLite do support the ``returning`` clause, simply forget 192 | about this, use the clause explicitly and treat the whole command as a standard 193 | select with the *empty* operator (relation), or ``^`` (tuple), or ``$`` (scalar). 194 | 195 | .. literalinclude:: ../../tests/blogdb/sql/blogs/li/blogs.sql 196 | :language: sql 197 | :lines: 39-41 198 | 199 | Executing this query in python will return the ``blogid`` of the inserted row. 200 | 201 | .. code:: python 202 | 203 | blogid = queries.publish_a_blog(conn, userid=1, title="Hi", content="blah blah.") 204 | 205 | ``*!`` Insert/Update/Delete Many 206 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 207 | 208 | The ``*!`` operator directs aiosql to execute a SQL statement over all items of a given sequence. 209 | Under the hood this calls the ``executemany`` method of many database drivers. 210 | See `sqlite3 Cursor.executemany `__ 211 | for an example. 212 | 213 | In aiosql we can use this for a bulk publish method that operates over a list of blog entries. 214 | 215 | .. literalinclude:: ../../tests/blogdb/sql/blogs/pg/blogs.sql 216 | :language: sql 217 | :lines: 50-53 218 | 219 | ..
code:: python 220 | 221 | queries = aiosql.from_path("blogs.sql", "psycopg2") 222 | blogs = [ 223 | {"userid": 1, "title": "First Blog", "content": "...", "published": datetime(2018, 1, 1)}, 224 | {"userid": 1, "title": "Next Blog", "content": "...", "published": datetime(2018, 1, 2)}, 225 | {"userid": 2, "title": "Hey, Hey!", "content": "...", "published": datetime(2018, 7, 28)}, 226 | ] 227 | queries.bulk_publish(conn, blogs) 228 | 229 | The method returns the number of affected rows, if available. 230 | 231 | ``#`` Execute Scripts 232 | ~~~~~~~~~~~~~~~~~~~~~ 233 | 234 | Using this operator will execute sql statements as a script. 235 | You can't do variable substitution with the ``#`` operator. 236 | An example use case is using data definition statements like create table in order to set up a database. 237 | 238 | .. literalinclude:: ../../tests/blogdb/sql/blogs/pg/blogs.sql 239 | :language: sql 240 | :lines: 9-16 241 | 242 | .. code:: python 243 | 244 | queries = aiosql.from_path("create_schema.sql", "sqlite3") 245 | queries.create_table_blogs(conn) 246 | 247 | Note: SQL scripts do not accept parameters. -------------------------------------------------------------------------------- /docs/source/getting-started.rst: -------------------------------------------------------------------------------- 1 | Getting Started 2 | =============== 3 | 4 | Philosophy 5 | ---------- 6 | 7 | The aiosql project is for writing SQL to interact with a database. 8 | Most database libraries are intended to reduce the amount of SQL developers need to write, 9 | aiosql takes an alternative approach. 10 | Why? 11 | 12 | - Alternatives are good. No approach fits all use cases, no matter how predominant. 13 | - SQL is the most expressive and performant way to interact with a SQL database. 14 | - Investigating where a query came from is simpler when it is source controlled, named, and written by a human.
15 | - Writing SQL in files gives you built-in compatibility with powerful SQL tools like 16 | `DataGrip `__ and 17 | `psql `__. 18 | 19 | About ORMs 20 | ~~~~~~~~~~ 21 | 22 | .. 23 | SQLAlchemy 2.0.32: 129582 locs 24 | Django 5.1 + 2 deps: 129682 locs (python & js) 25 | 26 | ORMs and SQL Query Builders offer object interfaces to generate and execute SQL. 27 | They exist to ease development, not to make it simpler. 28 | They have a large code base to provide their services, and many complementary packages 29 | to provide more advanced features, resulting in over a *hundred thousand* lines of code. 30 | Inheriting object hierarchies, mixing data with behaviors, mirroring a database schema, and generating SQL are not simple. 31 | ORMs are introduced early in a project's life when requirements are limited and the need to move fast is paramount. 32 | As a project grows, ORM objects and their relations grow too, they become a source of complexity and coupling. 33 | 34 | ``aiosql`` doesn't solve these problems directly either, your application will still get more complex with time. 35 | You can write bad SQL and bad python. 36 | But, with aiosql there is no mandate that all interaction with the database go 37 | through a complex network of related python objects that mirror a database schema. 38 | The only mandates are that you write SQL to talk to the database and python to use the data. 39 | From there you start with a system in which the database and the application are intentionally 40 | separate and independent from each other so they can change independently. 41 | The architecture of your application and the boundaries you choose between it and the database is left to you. 42 | 43 | The documentation for projects like `SQLAlchemy `__ and 44 | `Django DB `__ can give you a better vision 45 | for the class of problems that ORMs do solve and the productivity gains they intend.
46 | Please choose these projects over ``aiosql`` if you find that they fit the needs of your application better. 47 | 48 | Loading Queries 49 | --------------- 50 | 51 | This section goes over the three ways to make SQL queries available for execution in python. 52 | You'll learn the basics of defining queries so aiosql can find them and turn them into methods 53 | on a ``Queries`` object. 54 | For more details reference the :doc:`defining-sql-queries` documentation. 55 | 56 | From a SQL File 57 | ~~~~~~~~~~~~~~~ 58 | 59 | SQL can be loaded by providing a path to a ``.sql`` file. 60 | Below is a *blogs.sql* file that defines two queries. 61 | 62 | .. code:: sql 63 | 64 | -- name: get_all_blogs 65 | select blogid, 66 | userid, 67 | title, 68 | content, 69 | published 70 | from blogs; 71 | 72 | -- name: get_user_blogs 73 | -- Get blogs with a fancy formatted published date and author field 74 | select b.blogid, 75 | b.title, 76 | strftime('%Y-%m-%d %H:%M', b.published) as published, 77 | u.username as author 78 | from blogs b 79 | inner join users u on b.userid = u.userid 80 | where u.username = :username 81 | order by b.published desc; 82 | 83 | Notice the ``-- name: `` comments and the ``:username`` substitution variable. 84 | The comments that start with ``-- name:`` are the magic of aiosql. 85 | They are used by ```aiosql.from_path`` <./api.md#aiosqlfrom_path>`__ to parse the file 86 | into separate methods accessible by the name. 87 | The ``aiosql.from_path`` function takes a path to a sql file or directory 88 | and the name of the database driver intended for use with the methods. 89 | 90 | .. code:: python 91 | 92 | queries = aiosql.from_path("blogs.sql", "sqlite3") 93 | 94 | In the case of *blogs.sql* we expect the following two methods to be available. 95 | The ``username`` parameter of ``get_user_blogs`` will substitute in for the ``:username`` variable in the SQL. 96 | Standard ``SELECT`` statements return a generator, i.e. 
something which can be iterated upon, 97 | for instance with a ``for`` loop. 98 | Just cast the result to ``list`` to get an actual list. 99 | The generator returns what the underlying driver returns, usually tuples. 100 | 101 | .. code:: python 102 | 103 | def get_all_blogs(self) -> Generator[Any]: 104 | pass 105 | 106 | def get_user_blogs(self, username: str) -> Generator[Any]: 107 | pass 108 | 109 | From an SQL String 110 | ~~~~~~~~~~~~~~~~~~ 111 | 112 | SQL can be loaded from a string as well. 113 | The result below is the same as the first example above that loads from a SQL file. 114 | 115 | .. code:: python 116 | 117 | sql_str = """ 118 | -- name: get_all_blogs 119 | select blogid, 120 | userid, 121 | title, 122 | content, 123 | published 124 | from blogs; 125 | 126 | -- name: get_user_blogs 127 | -- Get blogs with a fancy formatted published date and author field 128 | select b.blogid, 129 | b.title, 130 | strftime('%Y-%m-%d %H:%M', b.published) as published, 131 | u.username as author 132 | from blogs b 133 | inner join users u on b.userid = u.userid 134 | where u.username = :username 135 | order by b.published desc; 136 | """ 137 | 138 | queries = aiosql.from_str(sql_str, "sqlite3") 139 | 140 | The ``Queries`` object here will have two methods: 141 | 142 | .. code:: python 143 | 144 | queries.get_all_blogs(conn) 145 | queries.get_user_blogs(conn, username="johndoe") 146 | 147 | From a Directory of SQL Files 148 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 149 | 150 | Loading a directory of SQL files loads all of the queries defined in those files into a single object. 151 | The ``example/sql`` directory below contains three ``.sql`` files and can be loaded using 152 | ``aiosql.from_path`` `<./api.md#aiosqlfrom_path>`__. 153 | 154 | :: 155 | 156 | example/sql 157 | ├── blogs.sql 158 | ├── create_schema.sql 159 | └── users.sql 160 | 161 | .. 
code:: python 162 | 163 | queries = aiosql.from_path("example/sql", "sqlite3") 164 | 165 | The resulting ``queries`` object will have a mixture of methods from all the files. 166 | 167 | Subdirectories 168 | ^^^^^^^^^^^^^^ 169 | 170 | Introducing subdirectories allows namespacing queries. 171 | This provides a way to further organize and group queries conceptually. 172 | For instance, you could define blog queries separate from user queries and access them on distinct 173 | properties of the queries object. 174 | 175 | Assume the *blogs.sql* and *users.sql* files both contain a ``-- name: get_all`` query. 176 | 177 | :: 178 | 179 | example/sql/ 180 | ├── blogs/ 181 | │   └── blogs.sql 182 | ├── create_schema.sql 183 | └── users/ 184 | └── users.sql 185 | 186 | .. code:: python 187 | 188 | queries = aiosql.from_path("example/sql", "sqlite3") 189 | 190 | The ``Queries`` object has two nested ``get_all`` methods accessible on attributes ``.blogs`` and ``.users``. 191 | The attributes reflect the names of the subdirectories. 192 | 193 | .. code:: python 194 | 195 | queries.blogs.get_all(conn) 196 | queries.users.get_all(conn) 197 | 198 | Calling Query Methods 199 | --------------------- 200 | 201 | Connections 202 | ~~~~~~~~~~~ 203 | 204 | The connection or ``conn`` is always the first argument to an ``aiosql`` method. 205 | The ``conn`` is an open connection to a database driver that your aiosql method can use for executing the sql it contains. 206 | Controlling connections outside of aiosql queries means you can call multiple queries and control them under one transaction, 207 | or otherwise set connection level properties that affect driver behavior. 208 | 209 | .. note:: 210 | 211 | For more see: :doc:`advanced-topics`. 212 | 213 | In the examples throughout this page a ``conn`` object has been passed.
214 | Here is a more complete code example that shows the connection creation and call to 215 | ``aiosql.from_path`` `<./api.md#aiosqlfrom_path>`__ that makes a queries object. 216 | 217 | .. code:: pycon 218 | 219 | >>> import sqlite3 220 | >>> import aiosql 221 | >>> conn = sqlite3.connect("./blogs.db") 222 | >>> # Note the "sqlite3" driver_adapter argument is what tells 223 | >>> # aiosql it should be expecting a sqlite3 connection object. 224 | >>> queries = aiosql.from_path("./blogs.sql", "sqlite3") 225 | >>> queries.get_all_blogs(conn) 226 | [(1, 227 | 1, 228 | 'What I did Today', 229 | 'I mowed the lawn, washed some clothes, and ate a burger.\n' 230 | '\n' 231 | 'Until next time,\n' 232 | 'Bob', 233 | '2017-07-28'), 234 | (2, 3, 'Testing', 'Is this thing on?\n', '2018-01-01'), 235 | (3, 236 | 1, 237 | 'How to make a pie.', 238 | '1. Make crust\n2. Fill\n3. Bake\n4. Eat\n', 239 | '2018-11-23')] 240 | 241 | See the associated `AnoDB Project `__ for embedding both a connection pool and queries. 242 | 243 | Passing Parameters 244 | ~~~~~~~~~~~~~~~~~~ 245 | 246 | .. code:: sql 247 | 248 | -- name: get_user_blogs 249 | -- Get blogs with a fancy formatted published date and author field 250 | select b.blogid, 251 | b.title, 252 | strftime('%Y-%m-%d %H:%M', b.published) as published, 253 | u.username as author 254 | from blogs b 255 | inner join users u on b.userid = u.userid 256 | where u.username = :username 257 | order by b.published desc; 258 | 259 | ``aiosql`` allows parameterization of queries by parsing values like ``:username`` 260 | in the above query and having the resultant method expect an inbound argument to 261 | substitute for ``:username``. 262 | 263 | You can call the ``get_user_blogs`` function with plain arguments or keyword arguments with the 264 | name of the substitution variable. 265 | 266 | ..
code:: python 267 | 268 | >>> import sqlite3 269 | >>> import aiosql 270 | >>> conn = sqlite3.connect("./blogs.db") 271 | >>> queries = aiosql.from_path("./blogs.sql", "sqlite3") 272 | >>> 273 | >>> # Using keyword args 274 | >>> queries.get_user_blogs(conn, username="bobsmith") 275 | [(3, 'How to make a pie.', '2018-11-23 00:00', 'bobsmith'), (1, 'What I did Today', '2017-07-28 00:00', 'bobsmith')] 276 | >>> 277 | >>> # Using positional argument 278 | >>> queries.get_user_blogs(conn, "janedoe") 279 | [(2, 'Testing', '2018-01-01 00:00', 'janedoe')] 280 | 281 | .. warning:: 282 | 283 | When passing positional arguments aiosql will apply them in the order that the substitutions appear in your SQL. 284 | This can be convenient and clear in some cases, but very confusing in others. 285 | You might want to choose to always name your arguments for clarity. 286 | Consider enforcing this behavior by passing ``kwargs_only=True`` when creating the queries. 287 | 288 | It is also possible to access simple object attributes in a query, with the dot syntax: 289 | 290 | .. code:: sql 291 | 292 | -- name: add_user 293 | insert into users(username, name) 294 | values (:u.username, :u.name); 295 | 296 | Then simply pass your object as ``u``: 297 | 298 | .. code:: python 299 | 300 | # User is some class with attributes username and name 301 | calvin = User("calvin", "Calvin") 302 | queries.add_user(conn, u=calvin) -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | aiosql - Simple SQL in Python 2 | ============================= 3 | 4 | `SQL `__ is code. 5 | Write it, version control it, comment it, and run it using files. 6 | Writing your SQL code in Python programs as strings doesn't allow you to easily 7 | reuse them in SQL GUIs or CLI tools like ``psql``.
8 | With aiosql you can organize your SQL statements in *.sql* files, load them 9 | into your python application as methods to call without losing the ability to 10 | use them as you would any other SQL file. 11 | 12 | This project supports standard 13 | `PEP 249 `__ 14 | and 15 | `asyncio `__ 16 | based drivers for 17 | `SQLite `__ 18 | (`sqlite3 `__, 19 | `aiosqlite `__, 20 | `apsw `__), 21 | `PostgreSQL `__ 22 | (`psycopg (3) `__, 23 | `psycopg2 `__, 24 | `pg8000 `__, 25 | `pygresql `__, 26 | `asyncpg `__), 27 | `MySQL `__ 28 | (`PyMySQL `__, 29 | `mysqlclient `__, 30 | `mysql-connector `__, 31 | `asyncmy `__ with 32 | `this adapter `__), 33 | `MariaDB `__ 34 | (`mariadb `__), 35 | `DuckDB `__ 36 | (`duckdb `__) and 37 | `MS SQL Server `__ 38 | (`pymssql `__), 39 | However, some detailed feature support may vary depending on the underlying driver 40 | and database engine actual capabilities. 41 | 42 | Other SQL database drivers which support the ``pyformat`` or ``named`` 43 | `PEP 249 `__ paramstyles should work as well 44 | by just passing the driver as a parameter when building queries. Thus 45 | `Oracle Database `__ 46 | (`oracledb `__) or 47 | `Snowflake `__ 48 | (`snowflake.connector `__) 49 | should work out of the box… 50 | Please report with an issue if it actually works for you! 51 | Otherwise, extensions to support other database drivers can be written by you! 52 | See: `Database Driver Adapters <./database-driver-adapters.html>`__. 53 | Feel free to pull request! 54 | 55 | This module is an implementation of 56 | `Kris Jenkins' yesql `__ 57 | `Clojure `__ library to the 58 | `Python `__ 59 | `ecosystem `__. 60 | 61 | Badges 62 | ------ 63 | 64 | .. 65 | NOTE :target: is needed so that github renders badges on a line. 66 | .. image:: https://github.com/nackjicholson/aiosql/actions/workflows/aiosql-package.yml/badge.svg?branch=main&style=flat 67 | :alt: Build status 68 | :target: https://github.com/nackjicholson/aiosql/actions/ 69 | .. 
70 | NOTE hardcoded, this is maintained manually. 71 | .. image:: https://img.shields.io/badge/coverage-100%25-success 72 | :alt: Code Coverage 73 | :target: https://github.com/nackjicholson/aiosql/actions/ 74 | .. 75 | NOTE all tests 76 | # MIST 77 | loading: 17 78 | patterns: 5 79 | # SYNC 80 | sqlite3: 17 81 | apsw: 16 82 | duckdb: 15 83 | mariadb: 17 84 | pymysql: 16 85 | mysqldb: 15 86 | myco: 16 87 | pymssql: 16 88 | pg8000: 14 89 | psycopg2: 18 90 | psycopg3: 19 91 | pygresql: 15 92 | # ASYNC 93 | aiosqlite: 13 94 | asyncpg: 18 95 | .. image:: https://img.shields.io/badge/tests-247%20✓-success 96 | :alt: Tests 97 | :target: https://github.com/nackjicholson/aiosql/actions/ 98 | .. image:: https://img.shields.io/github/issues/nackjicholson/aiosql?style=flat 99 | :alt: Issues 100 | :target: https://github.com/nackjicholson/aiosql/issues/ 101 | .. image:: https://img.shields.io/github/contributors/nackjicholson/aiosql 102 | :alt: Contributors 103 | :target: https://github.com/nackjicholson/aiosql/graphs/contributors 104 | .. image:: https://img.shields.io/pypi/dm/aiosql?style=flat 105 | :alt: Pypi Downloads 106 | :target: https://pypistats.org/packages/aiosql 107 | .. image:: https://img.shields.io/github/stars/nackjicholson/aiosql?style=flat&label=Star 108 | :alt: Stars 109 | :target: https://github.com/nackjicholson/aiosql/stargazers 110 | .. image:: https://img.shields.io/pypi/v/aiosql 111 | :alt: Version 112 | :target: https://pypi.org/project/aiosql/ 113 | .. image:: https://img.shields.io/github/languages/code-size/nackjicholson/aiosql?style=flat 114 | :alt: Code Size 115 | :target: https://github.com/nackjicholson/aiosql/ 116 | .. image:: https://img.shields.io/badge/databases-6-informational 117 | :alt: Databases 118 | :target: https://github.com/nackjicholson/aiosql/ 119 | .. image:: https://img.shields.io/badge/drivers-15-informational 120 | :alt: Drivers 121 | :target: https://github.com/nackjicholson/aiosql/ 122 | .. 
image:: https://img.shields.io/github/languages/count/nackjicholson/aiosql?style=flat 123 | :alt: Language Count 124 | :target: https://en.wikipedia.org/wiki/Programming_language 125 | .. image:: https://img.shields.io/github/languages/top/nackjicholson/aiosql?style=flat 126 | :alt: Top Language 127 | :target: https://en.wikipedia.org/wiki/Python_(programming_language) 128 | .. image:: https://img.shields.io/pypi/pyversions/aiosql?style=flat 129 | :alt: Python Versions 130 | :target: https://www.python.org/ 131 | .. 132 | NOTE some non-sense badge about badges:-) 133 | .. image:: https://img.shields.io/badge/badges-16-informational 134 | :alt: Badges 135 | :target: https://shields.io/ 136 | .. image:: https://img.shields.io/pypi/l/aiosql?style=flat 137 | :alt: BSD 2-Clause License 138 | :target: https://opensource.org/licenses/BSD-2-Clause 139 | 140 | 141 | Usage 142 | ----- 143 | 144 | Install from `pypi `__, for instance by running ``pip install aiosql``. 145 | 146 | Then write parametric SQL queries in a file and execute it from Python methods, 147 | eg this *greetings.sql* file: 148 | 149 | .. code:: sql 150 | 151 | -- name: get_all_greetings() 152 | -- Get all the greetings in the database 153 | select greeting_id, greeting 154 | from greetings 155 | order by 1; 156 | 157 | -- name: get_user_by_username(username)^ 158 | -- Get a user from the database using a named parameter 159 | select user_id, username, name 160 | from users 161 | where username = :username; 162 | 163 | This example has an imaginary SQLite database with greetings and users. 164 | It prints greetings in various languages to the user and showcases the basic 165 | feature of being able to load queries from a SQL file and call them by name 166 | in python code. 167 | Query parameter declarations (eg ``(username)``) are optional, and enforced 168 | when provided. 169 | 170 | You can use ``aiosql`` to load the queries in this file for use in your Python 171 | application: 172 | 173 | .. 
code:: python 174 | 175 | import aiosql 176 | import sqlite3 177 | 178 | queries = aiosql.from_path("greetings.sql", "sqlite3") 179 | 180 | with sqlite3.connect("greetings.db") as conn: 181 | user = queries.get_user_by_username(conn, username="willvaughn") 182 | # user: (1, "willvaughn", "William") 183 | 184 | for _, greeting in queries.get_all_greetings(conn): 185 | # scan: (1, "Hi"), (2, "Aloha"), (3, "Hola"), … 186 | print(f"{greeting}, {user[2]}!") 187 | # Hi, William! 188 | # Aloha, William! 189 | # … 190 | 191 | Or even in an asynchroneous way, with two SQL queries running in parallel 192 | using ``aiosqlite`` and ``asyncio``: 193 | 194 | .. code:: python 195 | 196 | import asyncio 197 | import aiosql 198 | import aiosqlite 199 | 200 | queries = aiosql.from_path("greetings.sql", "aiosqlite") 201 | 202 | async def main(): 203 | async with aiosqlite.connect("greetings.db") as conn: 204 | # possibly in parallel… 205 | greetings, user = await asyncio.gather( 206 | queries.get_all_greetings(conn), 207 | queries.get_user_by_username(conn, username="willvaughn") 208 | ) 209 | 210 | for _, greeting in greetings: 211 | print(f"{greeting}, {user[2]}!") 212 | 213 | asyncio.run(main()) 214 | 215 | It may seem inconvenient to provide a connection on each call. 216 | You may have a look at the `AnoDB `__ `DB` 217 | class which wraps both a database connection *and* queries in one 218 | connection-like extended object, including performing automatic reconnection 219 | when needed. The wrapper also allows to cache query results. 220 | 221 | Why you might want to use this 222 | ------------------------------ 223 | 224 | * You think SQL is pretty good, and writing SQL is an important part of your applications. 225 | * You don't want to write your SQL in strings intermixed with your python code. 
226 | * You're not using an ORM like `SQLAlchemy `__ or 227 | `Django `__ , 228 | with large (100k lines) code imprints vs under 1000 for `aiosql` and about 300 for `anodb`, 229 | and you don't need to or don't want to write SQL-like code with a Python syntax. 230 | * You want to be able to reuse your SQL in other contexts, 231 | eg loading it into `psql` or other database tools. 232 | 233 | 234 | Why you might NOT want to use this 235 | ---------------------------------- 236 | 237 | * You're looking for an `ORM `__. 238 | * You aren't comfortable writing SQL code. 239 | * You don't have anything in your application that requires complicated SQL beyond basic CRUD operations. 240 | * Dynamically loaded objects built at runtime really bother you. 241 | -------------------------------------------------------------------------------- /docs/source/todo.rst: -------------------------------------------------------------------------------- 1 | AioSQL - Backlog 2 | ================ 3 | 4 | Todo or not, that is the question… 5 | 6 | - maintain up-to-date wrt Python moving target… 7 | - add apsw, duckdb, asyncpg and psycopg2 to pypy 3.13 when possible. 8 | - once 3.9 support is dropped, remove old-style type hints. 9 | - write a small SQLite3-based tutorial? 10 | - tests with even more database and drivers? 11 | - rethink record classes? we just really want a row conversion function? 12 | - add documentation about docker runs? isn't `docker/README.md` enough? 13 | - `HugSQL `_ Clojure library as support for multiple 14 | kind of substitutions, maybe we could do the same. 15 | 16 | For instance for identifiers: 17 | 18 | .. code:: sql 19 | 20 | -- name: select(cols, table) 21 | SELECT :i*:cols FROM :i:table ORDER BY 1; 22 | 23 | .. code:: python 24 | 25 | res = db.select(conn, cols=["uid", "name"], table="users") 26 | 27 | This would require separating identifiers management and to build 28 | and memoize the query variants. 
29 | -------------------------------------------------------------------------------- /docs/source/versions.rst: -------------------------------------------------------------------------------- 1 | AioSQL - Versions 2 | ================= 3 | 4 | 13.4 on 2025-04-09 5 | ------------------ 6 | 7 | - update GitHub CI configuration. 8 | - use SPDX format for licensing informations and add topics. 9 | - doc, separate backlog from versions. 10 | 11 | 13.3 on 2025-03-07 12 | ------------------ 13 | 14 | - rework dependencies. 15 | - enable *PyPy 3.11*, *Python 3.13t* and *Python 3.14* in GitHub CI. 16 | 17 | 13.2 on 2025-01-29 18 | ------------------ 19 | 20 | - improve empty query handling. 21 | - update documentation. 22 | 23 | 13.1 on 2025-01-23 24 | ------------------ 25 | 26 | - fix warning repetition and display for missing `!` on non-SELECT. 27 | - improve documentation with declared parameters in examples. 28 | - homogeneise test consistency wrt attribute and parameter names. 29 | - fix doc typos. 30 | 31 | 13.0 on 2024-11-10 32 | ------------------ 33 | 34 | - change `kwargs_only` parameter default value to _True_. **Compatibility break.** 35 | - add optional parameter declarations to queries, and check them when provided. 36 | - forbid positional parameters when named parameters are declared. 37 | - warn on probable missing operation. 38 | - silent some test warnings. 39 | - add *psycopg2* back to CI with Python 3.13. 40 | - improve documentation. 41 | - improve Makefile. 42 | 43 | 12.2 on 2024-10-02 44 | ------------------ 45 | 46 | - fix included source lines in documentation. 47 | 48 | 12.1 on 2024-10-01 49 | ------------------ 50 | 51 | - drop support for *Python 3.8*. 52 | - enable *DuckDB* with *Python 3.13*. 53 | - fix duckdb adapter for *DuckDB 1.1*. 54 | 55 | 12.0 on 2024-09-07 56 | ------------------ 57 | 58 | - add official support for MS SQL Server with `pymssql`. 59 | - pass misc parameters to cursor in generic adapter. 
60 | - further improve typing to please pyright. 61 | - minor doc fixes… 62 | - improve one error message. 63 | - reduce verbosity when overriding an adapter. 64 | - refactor tests, simplifying the structure and adding over 50 tests. 65 | in particular, schema creation now relies on *aiosql* features 66 | instead of using driver capabilities directly. 67 | 68 | 11.1 on 2024-08-20 69 | ------------------ 70 | 71 | - improve documentation. 72 | - upgrade sphinx and corresponding read-the-doc theme. 73 | 74 | 11.0 on 2024-08-17 75 | ------------------ 76 | 77 | - update and improve documentation. 78 | - do not allow to override existing queries, as it can lead to hard to 79 | understand bugs. 80 | - use ``pytest.fail`` instead of ``assert False`` in tests. 81 | 82 | 10.4 on 2024-08-08 83 | ------------------ 84 | 85 | - add history of version changes in the documentation (this file!). 86 | - improve comments and doc strings. 87 | 88 | 10.3 on 2024-08-03 89 | ------------------ 90 | 91 | - add *Python 3.13* and *PyPy 3.10* 92 | 93 | 10.2 on 2024-05-29 94 | ------------------ 95 | 96 | - exclude SQL hints from pattern matching on C comments. 97 | - improve tests about SQL comments. 98 | 99 | 10.1 on 2024-03-06 100 | ------------------ 101 | 102 | - drop ``black`` and ``flake8`` checks, add ``ruff`` instead. 103 | - upgrade doc build GitHub CI version. 104 | 105 | 10.0 on 2024-03-02 106 | ------------------ 107 | 108 | - add ``:object.attribute`` support to reference object attributes in queries. 109 | - add tests about these with dataclasses. 110 | 111 | 9.5 on 2024-02-18 112 | ----------------- 113 | 114 | - add ``duckdb`` support for *Python 3.12* CI. 115 | 116 | 9.4 on 2024-01-28 117 | ----------------- 118 | 119 | - upgrade non regression tests CI version. 120 | - improve coverage test report. 121 | - add doc strings to more methods. 122 | - add ``kwargs*only`` option to fail on simple args. 123 | - add relevant tests about previous item. 
124 | - move various utils in ``Queries``. 125 | - add more or improve static typing hints. 126 | - minor style changes. 127 | 128 | 9.3 on 2024-01-18 129 | ----------------- 130 | 131 | - add pyright check. 132 | - improve generic adapter. 133 | - improve static typing. 134 | 135 | 9.2 on 2023-12-24 136 | ----------------- 137 | 138 | - improve some tests. 139 | - minor improvements for async adapters. 140 | 141 | 9.1 on 2023-12-06 142 | ----------------- 143 | 144 | - add *Python 3.12* to GitHub CI. 145 | - get release number from package meta data. 146 | - update doc relating to `` sqlite3.Connection: 51 | conn = sqlite3.connect(db_path) 52 | conn.row_factory = sqlite3.Row 53 | return conn 54 | 55 | 56 | def createdb(): 57 | if db_path.exists(): 58 | raise SystemExit(f"Database at {db_path} already exists.") 59 | conn = get_conn() 60 | print("Inserting users and blogs data.") 61 | with conn: 62 | queries.create_schema(conn) 63 | queries.users.insert_many(conn, users) 64 | queries.blogs.insert_many(conn, blogs) 65 | print("Done!") 66 | conn.close() 67 | 68 | 69 | def deletedb(): 70 | print(f"Deleting the {db_path} file") 71 | if db_path.exists(): 72 | db_path.unlink() 73 | 74 | 75 | def get_users(): 76 | conn = get_conn() 77 | for user in queries.users.get_all(conn): 78 | s = "{" 79 | for k in user.keys(): 80 | s += f"{k}: {user[k]}, " 81 | s = s[:-2] 82 | s += "}" 83 | print(s) 84 | 85 | 86 | def get_user_blogs(username): 87 | conn = get_conn() 88 | user_blogs = queries.blogs.get_user_blogs(conn, username=username) 89 | for user_blog in user_blogs: 90 | print("------") 91 | print(f"{user_blog['title']}") 92 | print(f"by {user_blog['author']} at {user_blog['published']}") 93 | 94 | 95 | if __name__ == "__main__": 96 | parser = argparse.ArgumentParser() 97 | subparsers = parser.add_subparsers() 98 | 99 | createdb_parser = subparsers.add_parser("createdb") 100 | createdb_parser.set_defaults(cmd=createdb) 101 | 102 | deletedb_parser = 
subparsers.add_parser("deletedb") 103 | deletedb_parser.set_defaults(cmd=deletedb) 104 | 105 | get_users_parser = subparsers.add_parser("get-users") 106 | get_users_parser.set_defaults(cmd=get_users) 107 | 108 | get_user_blogs_parser = subparsers.add_parser("get-user-blogs") 109 | get_user_blogs_parser.add_argument("username") 110 | get_user_blogs_parser.set_defaults(cmd=get_user_blogs) 111 | 112 | args = parser.parse_args() 113 | cmd_kwargs = {k: v for k, v in vars(args).items() if k != "cmd"} 114 | args.cmd(**cmd_kwargs) 115 | -------------------------------------------------------------------------------- /example/greetings.py: -------------------------------------------------------------------------------- 1 | import aiosql 2 | import sqlite3 3 | 4 | queries = aiosql.from_path("greetings.sql", "sqlite3") 5 | 6 | with sqlite3.connect("greetings.db") as conn: 7 | user = queries.get_user_by_username(conn, username="willvaughn") 8 | # user: (1, "willvaughn", "William") 9 | 10 | for _, greeting in queries.get_all_greetings(conn): 11 | # scan: (1, "Hi"), (2, "Aloha"), (3, "Hola"), … 12 | print(f"{greeting}, {user[2]}!") 13 | # Hi, William! 14 | # Aloha, William! 15 | # … 16 | -------------------------------------------------------------------------------- /example/greetings.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | echo "# creating sqlite greetings database…" 4 | sqlite3 greetings.db < greetings_create.sql 5 | 6 | echo "# running standard aiosql example code…" 7 | python greetings.py 8 | 9 | echo "# running async aiosql example code…" 10 | python greetings_async.py 11 | 12 | echo "# running cursor aiosql example code…" 13 | python greetings_cursor.py 14 | 15 | echo "# running execute values and observer…" 16 | python observe_query.py 17 | 18 | echo "# removing greetings database." 
19 | rm greetings.db 20 | -------------------------------------------------------------------------------- /example/greetings.sql: -------------------------------------------------------------------------------- 1 | -- name: get_all_greetings 2 | -- Get all the greetings in the database 3 | select greeting_id, greeting 4 | from greetings 5 | order by 1; 6 | 7 | -- name: get_user_by_username^ 8 | -- Get a user from the database using a named parameter 9 | select user_id, username, name 10 | from users 11 | where username = :username; 12 | -------------------------------------------------------------------------------- /example/greetings_async.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import aiosql 3 | import aiosqlite 4 | 5 | queries = aiosql.from_path("greetings.sql", "aiosqlite") 6 | 7 | async def main(): 8 | # Parallel queries!!! 9 | async with aiosqlite.connect("greetings.db") as conn: 10 | greetings, user = await asyncio.gather( 11 | queries.get_all_greetings(conn), 12 | queries.get_user_by_username(conn, username="willvaughn") 13 | ) 14 | 15 | for _, greeting in greetings: 16 | print(f"{greeting}, {user[2]}!") 17 | 18 | asyncio.run(main()) 19 | -------------------------------------------------------------------------------- /example/greetings_create.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS greetings; 2 | DROP TABLE IF EXISTS users; 3 | 4 | CREATE TABLE greetings( 5 | greeting_id INTEGER PRIMARY KEY, 6 | greeting TEXT NOT NULL 7 | ); 8 | 9 | INSERT INTO greetings(greeting_id, greeting) VALUES 10 | (1, 'Hi'), 11 | (2, 'Aloha'), 12 | (3, 'Hola'), 13 | (4, 'Bonjour'), 14 | (5, '你好'); 15 | 16 | CREATE TABLE users( 17 | user_id INTEGER PRIMARY KEY, 18 | username TEXT UNIQUE NOT NULL, 19 | name TEXT NOT NULL 20 | ); 21 | 22 | INSERT INTO users(user_id, username, name) VALUES 23 | (1, 'willvaughn', 'William'), 24 | (2, 'calvin', 
'Fabien'); 25 | -------------------------------------------------------------------------------- /example/greetings_cursor.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import aiosql 3 | import aiosqlite 4 | from typing import List 5 | 6 | queries = aiosql.from_path("greetings.sql", "aiosqlite") 7 | 8 | async def access_cursor(): 9 | async with aiosqlite.connect("greetings.db") as conn: 10 | # append _cursor after query name 11 | async with queries.get_all_greetings_cursor(conn) as cur: 12 | print([col_info[0] for col_info in cur.description]) 13 | first_row = await cur.fetchone() 14 | all_data = await cur.fetchall() 15 | print(f"FIRST ROW: {first_row}") # tuple of first row data 16 | print(f"OTHER DATA: {all_data}") # remaining rows 17 | 18 | asyncio.run(access_cursor()) 19 | 20 | # ['greeting_id', 'greeting'] 21 | # FIRST ROW: (1, 'Hi') 22 | # OTHER DATA: [(2, 'Aloha'), (3, 'Hola'), (4, 'Bonjour'), (5, '你好')] 23 | -------------------------------------------------------------------------------- /example/observe_query.py: -------------------------------------------------------------------------------- 1 | import time 2 | import logging 3 | import contextlib 4 | import pg_execute_values as pev 5 | 6 | logging.basicConfig(level=logging.INFO) 7 | log = logging.getLogger("metrics") 8 | 9 | def report_metrics(op, sql, op_time): 10 | log.info(f"Operation: {op.name!r}\nSQL: {sql!r} \nTime (ms): {op_time}") 11 | 12 | @contextlib.contextmanager 13 | def observe_query(func): 14 | op = func.operation 15 | sql = func.sql 16 | start = time.time() 17 | yield 18 | end = time.time() 19 | op_time = end - start 20 | report_metrics(op, sql, op_time) 21 | 22 | with observe_query(pev.queries.getem): 23 | pev.queries.getem(pev.conn) 24 | 25 | # INFO:metrics:Operation: 'SELECT' 26 | # SQL: 'select * from test order by id;' 27 | # Time (ms): 2.6226043701171875e-06 28 | 
-------------------------------------------------------------------------------- /example/pg_execute_values.py: -------------------------------------------------------------------------------- 1 | import aiosql 2 | import psycopg2 3 | from psycopg2.extras import execute_values 4 | 5 | SQL = """ 6 | -- name: create_schema# 7 | create table if not exists test (id int primary key, v1 int, v2 int); 8 | 9 | -- name: insert! 10 | INSERT INTO test (id, v1, v2) VALUES %s; 11 | 12 | -- name: update! 13 | UPDATE test SET v1 = data.v1 FROM (VALUES %s) AS data (id, v1) 14 | WHERE test.id = data.id; 15 | 16 | -- name: getem 17 | select * from test order by id; 18 | """ 19 | 20 | queries = aiosql.from_str(SQL, "psycopg2") 21 | conn = psycopg2.connect("dbname=test") 22 | queries.create_schema(conn) 23 | with conn.cursor() as cur: 24 | execute_values(cur, queries.insert.sql, [(1, 2, 3), (4, 5, 6), (7, 8, 9)]) 25 | execute_values(cur, queries.update.sql, [(1, 20), (4, 50)]) 26 | 27 | print(list(queries.getem(conn))) 28 | # [(1, 20, 3), (4, 50, 6), (7, 8, 9)] 29 | -------------------------------------------------------------------------------- /example/sql/blogs/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: get_all_blogs 2 | select blogid, 3 | userid, 4 | title, 5 | content, 6 | published 7 | from blogs; 8 | 9 | -- name: publish_blog=3.9" 12 | license = "BSD-2-Clause" 13 | classifiers = [ 14 | "Programming Language :: Python :: 3", 15 | "Programming Language :: SQL", 16 | "Operating System :: OS Independent", 17 | "Development Status :: 5 - Production/Stable", 18 | "Intended Audience :: Developers", 19 | "Topic :: Database", 20 | "Topic :: Software Development :: Libraries :: Python Modules" 21 | ] 22 | 23 | [project.optional-dependencies] 24 | dev = [ 25 | "pytest", "pytest-asyncio", "coverage", 26 | "black", "flake8", "ruff", 27 | "mypy", "pyright", "types-setuptools", 28 | ] 29 | dev-duckdb = [ 30 | # skip, wheel 
compilation takes too much time… (confirmed 2024-09-30) 31 | "duckdb; implementation_name != 'pypy'" 32 | ] 33 | dev-sqlite = [ 34 | "aiosqlite", 35 | # fails on missing symbol with pypy (confirmed 2024-10-01) 36 | "apsw; implementation_name != 'pypy'" 37 | ] 38 | dev-postgres = [ 39 | "pytest-postgresql", 40 | "asyncpg; python_version < '3.13' and implementation_name != 'pypy'", 41 | "psycopg>=3", 42 | # 2.9.10 needed for 3.13 43 | "psycopg2 >= 2.9.10; implementation_name != 'pypy'", 44 | "pygresql", 45 | "pg8000" 46 | ] 47 | dev-mysql = [ 48 | "pytest-mysql", 49 | "mysqlclient", 50 | "mysql-connector-python", 51 | "pymysql" 52 | ] 53 | dev-mariadb = [ 54 | "mariadb" 55 | ] 56 | dev-mssql = [ 57 | "pymssql" 58 | ] 59 | doc = [ 60 | "sphinx", 61 | "sphinx-rtd-theme", 62 | # FIXME 3.13t rstcheck depends on pydantic 63 | "rstcheck", 64 | ] 65 | dist = [ 66 | "build", "wheel", "twine", 67 | ] 68 | 69 | [tool.setuptools.package-data] 70 | aiosql = [ "py.typed" ] 71 | 72 | [tool.setuptools.packages.find] 73 | include = [ "aiosql", "aiosql.adapters" ] 74 | exclude = [ "tests" ] 75 | 76 | [project.urls] 77 | repository = "https://github.com/nackjicholson/aiosql" 78 | documentation = "https://nackjicholson.github.io/aiosql/" 79 | issues = "https://github.com/nackjicholson/aiosql/issues" 80 | package = "https://pypi.org/project/aiosql/" 81 | 82 | [tool.black] 83 | line-length = 100 84 | target-version = ["py39"] 85 | 86 | [tool.mypy] 87 | exclude = ["(build|venv)/", ".*/virtualenvs/.*"] 88 | ignore_missing_imports = true 89 | -------------------------------------------------------------------------------- /tests/Makefile: -------------------------------------------------------------------------------- 1 | clean: 2 | $(RM) -r __pycache__ .pytest_cache 3 | -------------------------------------------------------------------------------- /tests/blogdb/data/blogs_data.csv: -------------------------------------------------------------------------------- 1 | 1,What I did Today,"I 
mowed the lawn - washed some clothes - ate a burger.",2017-07-28 2 | 3,Testing,Is this thing on?,2018-01-01 3 | 1,How to make a pie.,"1. Make crust\n2. Fill\n3. Bake\n4.Eat",2018-11-23 4 | -------------------------------------------------------------------------------- /tests/blogdb/data/users_data.csv: -------------------------------------------------------------------------------- 1 | bobsmith,Bob,Smith 2 | johndoe,John,Doe 3 | janedoe,Jane,Doe 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/blogs.oops: -------------------------------------------------------------------------------- 1 | This file should be ignored! 2 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/blogs.sql: -------------------------------------------------------------------------------- 1 | -- this part before the first description 2 | is expected 3 | TO BE IGNORED SILENTLY… 4 | 5 | -- name: drop-table-users# 6 | DROP TABLE IF EXISTS users; 7 | 8 | -- name: drop-table-blogs# 9 | DROP TABLE IF EXISTS blogs; 10 | 11 | -- name: drop-table-comments# 12 | DROP TABLE IF EXISTS comments; 13 | 14 | -- name: get-all-blogs() 15 | -- Fetch all fields for every blog in the database. 16 | select * from blogs; 17 | 18 | -- name: publish-blog= :published 28 | order by b.published desc; 29 | 30 | 31 | -- name: bulk-publish*! 32 | -- Insert many blogs at once 33 | insert into blogs ( 34 | blogid, 35 | userid, 36 | title, 37 | content, 38 | published 39 | ) 40 | values (nextval('blogs_seq'), ?, ?, ?, ?); 41 | 42 | -- name: publish-blog= :published 26 | order by b.published desc; 27 | 28 | 29 | -- name: bulk-publish*! 
30 | -- Insert many blogs at once 31 | insert into blogs ( 32 | userid, 33 | title, 34 | content, 35 | published 36 | ) 37 | values (?, ?, ?, ?); 38 | 39 | -- name: publish-a-blog= :published 38 | order by published desc; 39 | 40 | 41 | -- name: publish-blog= :published 26 | order by b.published desc; 27 | 28 | 29 | -- name: bulk-publish*! 30 | -- Insert many blogs at once 31 | insert into blogs ( 32 | userid, 33 | title, 34 | content, 35 | published 36 | ) 37 | values (%s, %s, %s, %s); 38 | 39 | -- name: publish-new-blog 40 | insert into blogs (userid, title, content) 41 | values (:userid, :title, :contents) 42 | returning blogid, title; 43 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/pg/asyncpg/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: add_many_blogs*! 2 | INSERT INTO blogs (userid, title, content, published) 3 | VALUES (:userid, :title, :content, :published); 4 | -------------------------------------------------------------------------------- /tests/blogdb/sql/blogs/pg/blogs.sql: -------------------------------------------------------------------------------- 1 | -- name: create-table-users# 2 | CREATE TABLE IF NOT EXISTS users( 3 | userid SERIAL PRIMARY KEY, 4 | username TEXT NOT NULL, 5 | firstname TEXT NOT NULL, 6 | lastname TEXT NOT NULL 7 | ); 8 | 9 | -- name: create-table-blogs# 10 | CREATE TABLE IF NOT EXISTS blogs( 11 | blogid SERIAL PRIMARY KEY, 12 | userid INTEGER NOT NULL REFERENCES users, 13 | title TEXT NOT NULL, 14 | content TEXT NOT NULL, 15 | published DATE NOT NULL DEFAULT CURRENT_DATE 16 | ); 17 | 18 | -- name: get-blogs-published-after 19 | -- Get all blogs by all authors published after the given date. 
20 | select title, 21 | username, 22 | to_char(published, 'YYYY-MM-DD HH24:MI') as "published" 23 | from blogs 24 | join users using(userid) 25 | where published >= :published 26 | order by published desc; 27 | 28 | 29 | -- name: publish-blog 0: 56 | tries -= 1 57 | try: 58 | with my_driver.connect(**my_dsn) as conn: 59 | tries = 0 60 | yield conn 61 | except Exception as e: 62 | fails += 1 63 | u.log.warning(f"{driver} connection failed ({fails}): {e}") 64 | time.sleep(1.0) 65 | 66 | @pytest.fixture 67 | def my_db(my_conn, queries): 68 | """Build the test database.""" 69 | create_user_blogs(my_conn, queries) 70 | fill_user_blogs(my_conn, queries) 71 | yield my_conn 72 | drop_user_blogs(my_conn, queries) 73 | 74 | except ModuleNotFoundError: 75 | # provide empty fixtures to please pytest "parsing" 76 | 77 | @pytest.fixture 78 | def my_dsn(): 79 | raise Exception("undefined fixture") 80 | 81 | @pytest.fixture 82 | def my_driver(): 83 | raise Exception("undefined fixture") 84 | 85 | @pytest.fixture 86 | def my_conn(): 87 | raise Exception("undefined fixture") 88 | 89 | @pytest.fixture 90 | def my_db(): 91 | raise Exception("undefined fixture") 92 | -------------------------------------------------------------------------------- /tests/conf_pgsql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from conf_schema import create_user_blogs, fill_user_blogs, drop_user_blogs 3 | import run_tests as t 4 | 5 | # guess psycopg version from a connection 6 | def is_psycopg2(conn): 7 | return hasattr(conn, "get_dsn_parameters") 8 | 9 | try: 10 | from pytest_postgresql import factories as pg_factories 11 | 12 | @pytest.fixture 13 | def pg_conn(request): 14 | """Loads seed data and return a database connection.""" 15 | is_detached = request.config.getoption("postgresql_detached") 16 | if is_detached: # pragma: no cover 17 | # this is *NOT* a connection, it does not have a "cursor" 18 | pg = 
request.getfixturevalue("postgresql_noproc") 19 | import psycopg 20 | 21 | conn = psycopg.connect( 22 | host=pg.host, 23 | port=pg.port, 24 | user=pg.user, 25 | password=pg.password, 26 | dbname=pg.dbname, 27 | options=pg.options, 28 | ) 29 | else: 30 | # returns the underlying pytest-postgresql connection 31 | # which may be psycopg version 2 or 3, depending. 32 | conn = request.getfixturevalue("postgresql") 33 | 34 | # yield the psycopg? connection 35 | yield conn 36 | 37 | # done 38 | conn.close() 39 | 40 | @pytest.fixture 41 | def pg_params(request, pg_conn): 42 | """Build postgres connection parameters as a dictionary.""" 43 | if is_psycopg2(pg_conn): # pragma: no cover 44 | dsn = pg_conn.get_dsn_parameters() 45 | del dsn["tty"] 46 | else: # assume psycopg 3.x 47 | dsn = pg_conn.info.get_parameters() 48 | # non empty password? 49 | if "password" not in dsn: 50 | dsn["password"] = request.config.getoption("postgresql_password") or "" 51 | if "port" not in dsn: 52 | dsn["port"] = 5432 53 | return dsn 54 | 55 | @pytest.fixture 56 | def pg_dsn(request, pg_params): 57 | """Build a postgres URL connection string.""" 58 | p = pg_params 59 | yield f"postgres://{p['user']}:{p['password']}@{p['host']}:{p['port']}/{p['dbname']}" 60 | 61 | @pytest.fixture 62 | def pg_db(rconn, queries): 63 | create_user_blogs(rconn, queries) 64 | fill_user_blogs(rconn, queries) 65 | yield rconn 66 | drop_user_blogs(rconn, queries) 67 | 68 | except ModuleNotFoundError: 69 | # FIXME empty fixtures to please pytest 70 | 71 | @pytest.fixture 72 | def pg_conn(): 73 | raise Exception("unimplemented fixture") 74 | 75 | @pytest.fixture 76 | def pg_params(): 77 | raise Exception("unimplemented fixture") 78 | 79 | @pytest.fixture 80 | def pg_dsn(): 81 | raise Exception("unimplemented fixture") 82 | 83 | @pytest.fixture 84 | def pg_db(): 85 | raise Exception("unimplemented fixture") 86 | -------------------------------------------------------------------------------- /tests/conf_schema.py: 
-------------------------------------------------------------------------------- 1 | # non portable SQL statements to create, fill and clear the database schema 2 | 3 | import asyncio 4 | from pathlib import Path 5 | import csv 6 | import utils 7 | import datetime 8 | 9 | # 10 | # yuk… hide sync/async 11 | # 12 | # We do not want to replicate schema creation functions for async. 13 | # 14 | # I believe that the asynchronous approach is a poor performance kludge 15 | # against bad interpreter parallelism support (JavaScript, CPython). 16 | # Because the interpreter is so bad at switching between contexts, the model 17 | # just offloads the task to the user for a limited benefit as it only really 18 | # brings improvements to IO-bound loads. 19 | # This interpreter-level implementation induces significant code complexity and 20 | # execution overheads. 21 | # It makes no sense from the hardware and operating system point of view, 22 | # which already have pretty efficient threads running on multicore cpus. 
def execute_any(conn, queries, name):
    """Run query *name* on *conn*, hiding the sync/async difference."""
    utils.log.debug(f"executing: {name}")
    f = queries.f(name)
    if queries.is_async:
        # drive the coroutine to completion from this synchronous caller
        return utils.run_async(f(conn))
    else:
        return f(conn)

def execute_commit(conn, queries):
    """Commit the current transaction, hiding the sync/async difference."""
    if queries.is_async:
        # transaction management is different with asyncpg…
        if queries._driver == "asyncpg":
            return
        return utils.run_async(conn.commit())
    else:
        return conn.commit()

def execute_many(conn, queries, name, data):
    """Run "insert many" query *name* with the *data* rows, sync or async."""
    f = queries.f(name)
    if queries.is_async:
        return utils.run_async(f(conn, data))
    else:
        return f(conn, data)
64 | count = execute_any(conn, queries, "users.get_count") 65 | assert count == 0 66 | 67 | # schema data 68 | def fill_user_blogs(conn, queries): 69 | with USERS_DATA_PATH.open() as fp: 70 | users = [ tuple(r) for r in csv.reader(fp) ] 71 | if queries._driver in ("pg8000", "asyncpg"): 72 | users = [ { "name": t[0], "fname": t[1], "lname": t[2] } for t in users ] 73 | execute_many(conn, queries, "users.add_many_users", users) 74 | with BLOGS_DATA_PATH.open() as fp: 75 | blogs = [ tuple(r) for r in csv.reader(fp) ] 76 | if queries._driver in ("pg8000", "asyncpg"): 77 | blogs = [ { "userid": int(t[0]), "title": t[1], "content": t[2], "published": datetime.date.fromisoformat(t[3]) } 78 | for t in blogs ] 79 | execute_many(conn, queries, "blogs.add_many_blogs", blogs) 80 | execute_commit(conn, queries) 81 | 82 | # schema destruction 83 | _DROP_USER_BLOGS = [ 84 | "blogs.drop_table_comments", 85 | "blogs.drop_table_blogs", 86 | "blogs.drop_table_users", 87 | ] 88 | 89 | def drop_user_blogs(conn, queries): 90 | for q in _DROP_USER_BLOGS: 91 | execute_any(conn, queries, q) 92 | execute_commit(conn, queries) 93 | -------------------------------------------------------------------------------- /tests/conf_sqlite.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pathlib import Path 3 | from conf_schema import create_user_blogs, fill_user_blogs, drop_user_blogs 4 | import utils 5 | 6 | @pytest.fixture 7 | def li_dbpath(tmpdir): 8 | db_path = str(Path(tmpdir.strpath) / "blogdb.db") 9 | yield db_path 10 | 11 | @pytest.fixture 12 | def li_db(rconn, queries): 13 | create_user_blogs(rconn, queries) 14 | fill_user_blogs(rconn, queries) 15 | yield rconn 16 | drop_user_blogs(rconn, queries) 17 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import aiosql 3 | import 
utils 4 | 5 | from run_tests import queries 6 | from conf_mysql import my_dsn, my_db, my_conn, my_driver 7 | from conf_pgsql import pg_conn, pg_params, pg_dsn, pg_db 8 | from conf_sqlite import li_dbpath, li_db 9 | from conf_duckdb import duckdb_conn 10 | from conf_mssql import ms_dsn, ms_db, ms_conn, ms_driver, ms_master 11 | 12 | def pytest_addoption(parser): 13 | # Postgres 14 | parser.addoption("--postgresql-detached", action="store_true") 15 | # MySQL and MariaDB 16 | parser.addoption("--mysql-detached", action="store_true") 17 | parser.addoption("--mysql-tries", default=1, type=int) 18 | parser.addoption( 19 | "--mysql-driver", 20 | default="MySQLdb", 21 | choices=["MySQLdb", "mysql.connector", "pymysql", "mariadb"], 22 | help="which driver to use for creating connections", 23 | ) 24 | if not utils.has_pkg("pytest_mysql"): 25 | parser.addoption("--mysql-host", type=str, default="localhost") 26 | parser.addoption("--mysql-port", type=int, default=3306) 27 | parser.addoption("--mysql-user", type=str, default="pytest") 28 | parser.addoption("--mysql-passwd", type=str, default="pytest") 29 | parser.addoption("--mysql-dbname", type=str, default="pytest") 30 | # MS SQL Server 31 | parser.addoption("--mssql-tries", default=1, type=int) 32 | parser.addoption("--mssql-driver", default="pymssql") 33 | parser.addoption("--mssql-user", default="sa") 34 | parser.addoption("--mssql-password", type=str) 35 | parser.addoption("--mssql-server", default="localhost") 36 | parser.addoption("--mssql-port", default=1433, type=int) 37 | parser.addoption("--mssql-database", default="master") 38 | 39 | # test adapter registering and overriding 40 | aiosql.aiosql.register_adapter("named", aiosql.adapters.GenericAdapter) 41 | aiosql.register_adapter("named", aiosql.adapters.GenericAdapter) 42 | -------------------------------------------------------------------------------- /tests/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 
2 | asyncio_default_fixture_loop_scope = function 3 | # asyncio: asynchronous tests 4 | markers = 5 | sqlite3: tests relying on sqlite 6 | postgres: tests relying on postgres 7 | mariadb: tests relying on mariadb 8 | mssql: tests relying on mssql 9 | mysql: tests relying on mysql 10 | duckdb: tests relying on duckdb 11 | misc: other tests 12 | -------------------------------------------------------------------------------- /tests/test_aiosqlite.py: -------------------------------------------------------------------------------- 1 | import aiosql 2 | import pytest 3 | import run_tests as t 4 | 5 | try: 6 | import aiosqlite 7 | import pytest_asyncio 8 | except ModuleNotFoundError as m: 9 | pytest.skip(f"missing module: {m}", allow_module_level=True) 10 | 11 | pytestmark = [ 12 | pytest.mark.sqlite3, 13 | ] 14 | 15 | @pytest.fixture(scope="module") 16 | def driver(): 17 | return "aiosqlite" 18 | 19 | @pytest.fixture(scope="module") 20 | def date(): 21 | return t.todate 22 | 23 | @pytest_asyncio.fixture 24 | async def rconn(li_dbpath): 25 | async with aiosqlite.connect(li_dbpath) as conn: 26 | yield conn 27 | 28 | @pytest_asyncio.fixture 29 | def aconn(li_db): 30 | yield li_db 31 | 32 | @pytest_asyncio.fixture 33 | def dconn(aconn): 34 | aconn.row_factory = aiosqlite.Row 35 | yield aconn 36 | 37 | from run_tests import ( 38 | run_async_sanity as test_sanity, 39 | run_async_record_query as test_record_query, 40 | run_async_parameterized_record_query as test_parameterized_record_query, 41 | run_async_parameterized_query as test_parameterized_query, 42 | run_async_record_class_query as test_record_class_query, 43 | run_async_select_one as test_record_select_one, 44 | run_async_select_value as test_record_select_value, 45 | run_async_insert_returning as test_record_insert_returning, 46 | run_async_delete as test_delete, 47 | run_async_insert_many as test_insert_many, 48 | run_async_execute_script as test_execute_script, 49 | run_async_methods as test_methods, 50 | 
run_async_select_cursor_context_manager as test_select_cursor_context_manager, 51 | ) 52 | -------------------------------------------------------------------------------- /tests/test_apsw.py: -------------------------------------------------------------------------------- 1 | import aiosql 2 | import pytest 3 | import run_tests as t 4 | import utils 5 | 6 | try: 7 | import apsw as db 8 | except ModuleNotFoundError: 9 | pytest.skip("missing driver: apsw", allow_module_level=True) 10 | 11 | pytestmark = [ 12 | pytest.mark.sqlite3, 13 | ] 14 | 15 | @pytest.fixture(scope="module") 16 | def driver(): 17 | return "apsw" 18 | 19 | @pytest.fixture(scope="module") 20 | def date(): 21 | return t.todate 22 | 23 | # driver does not seem to return row counts on ! 24 | @pytest.fixture(scope="module") 25 | def expect(): 26 | return -1 27 | 28 | class APSWConnection(db.Connection): 29 | """APSW Connection wrapper with autocommit off.""" 30 | 31 | def __init__(self, *args, **kwargs): 32 | super().__init__(*args, **kwargs) 33 | self._begin() 34 | 35 | def _begin(self): 36 | self.cursor().execute("BEGIN").close() 37 | 38 | def commit(self): # pragma: no cover 39 | self.cursor().execute("COMMIT").close() 40 | self._begin() 41 | 42 | def _rollback(self): 43 | self.cursor().execute("ROLLBACK").close() 44 | 45 | def rollback(self): # pragma: no cover 46 | self._rollback() 47 | self._begin() 48 | 49 | def close(self): 50 | self._rollback() 51 | super().close() 52 | 53 | @pytest.fixture 54 | def rconn(li_dbpath): 55 | conn = APSWConnection(li_dbpath) 56 | yield conn 57 | conn.close() 58 | 59 | @pytest.fixture 60 | def conn(li_db): 61 | yield li_db 62 | 63 | @pytest.fixture 64 | def dconn(conn): 65 | conn.setrowtrace(utils.dict_factory) 66 | return conn 67 | 68 | from run_tests import ( 69 | run_sanity as test_sanity, 70 | run_something as test_something, 71 | run_cursor as test_cursor, 72 | run_record_query as test_record_query, 73 | run_parameterized_query as test_parameterized_query, 74 
| run_parameterized_record_query as test_parameterized_record_query, 75 | run_record_class_query as test_record_class_query, 76 | run_select_cursor_context_manager as test_select_cursor_context_manager, 77 | run_select_one as test_select_one, 78 | # FIXME not supported? 79 | # run_insert_returning as test_insert_returning, 80 | run_delete as test_delete, 81 | run_insert_many as test_insert_many, 82 | run_select_value as test_select_value, 83 | run_date_time as test_date_time, 84 | run_object_attributes as test_object_attributes, 85 | run_execute_script as test_execute_script, 86 | run_modulo as test_modulo, 87 | ) 88 | -------------------------------------------------------------------------------- /tests/test_asyncpg.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import pytest 3 | import aiosql 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import asyncpg 9 | import pytest_asyncio 10 | except ModuleNotFoundError as m: 11 | pytest.skip(f"missing module: {m}", allow_module_level=True) 12 | 13 | pytestmark = [ 14 | pytest.mark.postgres, 15 | # pytest.mark.asyncio, 16 | pytest.mark.skipif(not u.has_pkg("pytest_postgresql"), reason="no pytest_postgresql"), 17 | pytest.mark.skipif(not u.has_pkg("pytest_asyncio"), reason="no pytest_asyncio"), 18 | ] 19 | 20 | @pytest.fixture(scope="module") 21 | def driver(): 22 | return "asyncpg" 23 | 24 | @pytest.fixture(scope="module") 25 | def date(): 26 | return datetime.date 27 | 28 | @pytest_asyncio.fixture 29 | async def rconn(pg_dsn): 30 | conn = await asyncpg.connect(pg_dsn) 31 | yield conn 32 | await conn.close() 33 | 34 | @pytest_asyncio.fixture 35 | async def aconn(pg_db): 36 | yield pg_db 37 | 38 | @pytest_asyncio.fixture 39 | async def dconn(aconn): 40 | # FIXME dict row? 
41 | yield aconn 42 | 43 | from run_tests import ( 44 | run_async_sanity as test_async_sanity, 45 | run_async_record_query as test_async_record_query, 46 | run_async_parameterized_query as test_async_parameterized_query, 47 | run_async_parameterized_record_query as test_async_parameterized_record_query, 48 | run_async_record_class_query as test_async_record_class_query, 49 | run_async_select_cursor_context_manager as test_async_select_cursor_context_manager, 50 | run_async_select_one as test_async_select_one, 51 | run_async_select_value as test_async_select_value, 52 | run_async_insert_returning as test_async_insert_returning, 53 | run_async_delete as test_async_delete, 54 | run_async_insert_many as test_async_insert_many, 55 | run_async_execute_script as test_async_execute_script, 56 | ) 57 | 58 | # TODO other pools? 59 | @pytest.mark.asyncio 60 | async def test_with_pool(pg_dsn, queries, pg_db): 61 | async with asyncpg.create_pool(pg_dsn) as pool: 62 | async with pool.acquire() as conn: 63 | await t.run_async_insert_returning(conn, queries, datetime.date) 64 | 65 | @pytest.mark.asyncio 66 | async def test_async_methods(pg_dsn, queries, pg_db): 67 | async with asyncpg.create_pool(pg_dsn) as pool: 68 | await t.run_async_methods(pool, queries) 69 | 70 | @pytest.mark.asyncio 71 | async def test_no_publish(aconn, queries): 72 | # TODO move in run 73 | no_publish = queries.f("blogs.no_publish") 74 | res = await no_publish(aconn) 75 | assert res is None 76 | 77 | def test_many_replacements(pg_dsn, queries): 78 | """If the replacement was longer than the variable, bad SQL was generated. 79 | 80 | The variable replacement code had a bug that caused it to miscalculate where in the 81 | original string to put the placeholders. The SQL below would produce a query that 82 | ended with "$8, $9, $10$11:k);" because of this bug. 83 | 84 | This test would fail before the bug was fixed and passes afterward. 
85 | 86 | This issue was reported in https://github.com/nackjicholson/aiosql/issues/90. 87 | """ 88 | 89 | sql = """ 90 | -- name: test= (3, 14) 266 | except ValueError as e: 267 | assert "positional" in str(e) 268 | 269 | def test_parameter_declarations(): 270 | # ok 271 | conn = sqlite3.connect(":memory:") 272 | run_param_queries(conn, kwargs_only=True) 273 | run_param_queries(conn, kwargs_only=False) 274 | conn.close() 275 | # errors 276 | try: 277 | aiosql.from_str("-- name: foo()\nSELECT :N + 1;\n", "sqlite3") 278 | pytest.fail("must raise an exception") 279 | except SQLParseException as e: 280 | assert "undeclared" in str(e) and "N" in str(e) 281 | try: 282 | aiosql.from_str("-- name: foo(N, M)\nSELECT :N + 1;\n", "sqlite3") 283 | pytest.fail("must raise an exception") 284 | except SQLParseException as e: 285 | assert "unused" in str(e) and "M" in str(e) 286 | try: 287 | aiosql.from_str("-- name: foo(a)#\nCREATE TABLE :a();\n", "sqlite3") 288 | pytest.fail("must raise an exception") 289 | except SQLParseException as e: 290 | assert "script" in str(e) 291 | 292 | def test_empty_query(): 293 | try: 294 | aiosql.from_str("-- name: foo\n--name: bla\n", "sqlite3") 295 | pytest.fail("must raise an exception") 296 | except SQLParseException as e: 297 | assert "empty query" in str(e) 298 | try: 299 | aiosql.from_str("-- name: foo\n-- record_class: Foo\n--name: bla\n", "sqlite3") 300 | pytest.fail("must raise an exception") 301 | except SQLParseException as e: 302 | assert "empty sql" in str(e) 303 | try: 304 | aiosql.from_str("-- name: foo\n \r\n\t --name: bla\n", "sqlite3") 305 | pytest.fail("must raise an exception") 306 | except SQLParseException as e: 307 | assert "empty query" in str(e) 308 | try: 309 | aiosql.from_str("-- name: foo\n-- just a comment\n--name: bla\n", "sqlite3") 310 | pytest.fail("must raise an exception") 311 | except SQLParseException as e: 312 | assert "empty sql" in str(e) 313 | try: 314 | aiosql.from_str("-- name: foo\n-- record_class: 
Foo\n-- just a comment\n--name: bla\n", "sqlite3") 315 | pytest.fail("must raise an exception") 316 | except SQLParseException as e: 317 | assert "empty sql" in str(e) 318 | try: 319 | aiosql.from_str("-- name: foo\n-- just a comment\n ; \n-- hop\n--name: bla\n", "sqlite3") 320 | pytest.fail("must raise an exception") 321 | except SQLParseException as e: 322 | assert "empty sql" in str(e) 323 | try: 324 | aiosql.from_str("-- name: foo\n-- just a comment\n;\n", "sqlite3") 325 | pytest.fail("must raise an exception") 326 | except SQLParseException as e: 327 | assert "empty sql" in str(e) 328 | try: 329 | aiosql.from_str("-- name: foo\n-- record_class: Foo\n-- just a comment\n;\n", "sqlite3") 330 | pytest.fail("must raise an exception") 331 | except SQLParseException as e: 332 | assert "empty sql" in str(e) 333 | -------------------------------------------------------------------------------- /tests/test_mariadb.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | 6 | try: 7 | import mariadb as db 8 | except ModuleNotFoundError: 9 | pytest.skip("missing driver: mariadb", allow_module_level=True) 10 | 11 | pytestmark = [ 12 | pytest.mark.mariadb, 13 | # FIXME this should run in detached mode!? 
14 | # pytest.mark.skipif(not u.has_pkg("pytest_mysql"), reason="no pytest_mysql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "mariadb" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def conn(my_db): 27 | return my_db 28 | 29 | @pytest.fixture 30 | def dconn(my_db): 31 | return my_db 32 | 33 | def test_my_dsn(my_dsn): 34 | assert "user" in my_dsn and "host" in my_dsn and "port" in my_dsn 35 | assert "dbname" not in my_dsn and "database" in my_dsn 36 | 37 | from run_tests import ( 38 | run_sanity as test_sanity, 39 | run_something as test_something, 40 | run_cursor as test_cursor, 41 | # run_record_query as test_record_query, 42 | # run_parameterized_record_query as test_parameterized_record_query, 43 | run_record_class_query as test_record_class_query, 44 | run_parameterized_query as test_parameterized_query, 45 | run_select_cursor_context_manager as test_select_cursor_context_manager, 46 | run_select_one as test_select_one, 47 | # FIXME should work 48 | # run_insert_returning as test_insert_returning, 49 | run_delete as test_delete, 50 | run_insert_many as test_insert_many, 51 | run_select_value as test_select_value, 52 | run_date_time as test_date_time, 53 | run_object_attributes as test_object_attributes, 54 | run_execute_script as test_execute_script, 55 | run_modulo as test_modulo, 56 | ) 57 | 58 | def test_record_query(dconn, queries): 59 | queries._queries.driver_adapter._kwargs = {"dictionary": True} 60 | return t.run_record_query(dconn, queries) 61 | 62 | def test_parameterized_record_query(dconn, queries, date): 63 | queries._queries.driver_adapter._kwargs = {"dictionary": True} 64 | return t.run_parameterized_record_query(dconn, queries, date) 65 | -------------------------------------------------------------------------------- /tests/test_myco.py: -------------------------------------------------------------------------------- 1 | import datetime 2 
| import aiosql 3 | import pytest 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import mysql.connector as db 9 | except ModuleNotFoundError: 10 | pytest.skip("missing driver: mysql.connector (mysql-connector)", allow_module_level=True) 11 | 12 | pytestmark = [ 13 | pytest.mark.mysql, 14 | pytest.mark.skipif(not u.has_pkg("pytest_mysql"), reason="no pytest_mysql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "mysql-connector" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def conn(my_db): 27 | return my_db 28 | 29 | def test_my_dsn(my_dsn): 30 | assert "user" in my_dsn and "host" in my_dsn and "port" in my_dsn 31 | 32 | def test_my_conn(conn): 33 | assert conn.__module__.startswith(db.__name__) 34 | t.run_something(conn) 35 | 36 | from run_tests import ( 37 | run_sanity as test_sanity, 38 | run_something as test_something, 39 | run_cursor as test_cursor, 40 | # run_record_query as test_record_query, 41 | # run_parameterized_record_query as test_parameterized_record_query, 42 | run_parameterized_query as test_parameterized_query, 43 | run_record_class_query as test_record_class_query, 44 | run_select_cursor_context_manager as test_select_cursor_context_manager, 45 | run_select_one as test_select_one, 46 | # run_insert_returning as test_insert_returning, 47 | run_delete as test_delete, 48 | run_insert_many as test_insert_many, 49 | run_select_value as test_select_value, 50 | run_date_time as test_date_time, 51 | run_object_attributes as test_object_attributes, 52 | run_execute_script as test_execute_script, 53 | run_modulo as test_modulo, 54 | ) 55 | -------------------------------------------------------------------------------- /tests/test_mysqldb.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | import utils as u 6 | 7 | 
try: 8 | import MySQLdb as db 9 | except ModuleNotFoundError: 10 | pytest.skip("missing driver: MySQLdb (mysqlclient)", allow_module_level=True) 11 | 12 | pytestmark = [ 13 | pytest.mark.mysql, 14 | pytest.mark.skipif(not u.has_pkg("pytest_mysql"), reason="no pytest_mysql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "mysqldb" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def conn(my_db): 27 | return my_db 28 | 29 | def test_my_dsn(my_dsn): 30 | assert "user" in my_dsn and "host" in my_dsn and "port" in my_dsn 31 | 32 | def test_my_conn(conn): 33 | assert conn.__module__.startswith(db.__name__) 34 | t.run_something(conn) 35 | 36 | from run_tests import ( 37 | run_sanity as test_sanity, 38 | run_something as test_something, 39 | run_cursor as test_cursor, 40 | # FIXME 41 | # run_record_query as test_record_query, 42 | # run_parameterized_record_query as test_parameterized_record_query, 43 | run_parameterized_query as test_parameterized_query, 44 | run_record_class_query as test_record_class_query, 45 | run_select_cursor_context_manager as test_select_cursor_context_manager, 46 | run_select_one as test_select_one, 47 | # run_insert_returning as test_insert_returning, 48 | run_delete as test_delete, 49 | run_insert_many as test_insert_many, 50 | run_select_value as test_select_value, 51 | run_date_time as test_date_time, 52 | run_object_attributes as test_object_attributes, 53 | run_execute_script as test_execute_script, 54 | # FIXME kwargs -> args? 
55 | # run_modulo as test_modulo, 56 | ) 57 | -------------------------------------------------------------------------------- /tests/test_patterns.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from aiosql.utils import VAR_REF 3 | from aiosql.query_loader import _UNCOMMENT, _remove_ml_comments 4 | 5 | pytestmark = [ 6 | pytest.mark.misc, 7 | ] 8 | 9 | 10 | def test_var_pattern_is_quote_aware(): 11 | sql = r""" 12 | select foo_id, 13 | bar_id, 14 | to_char(created_at, 'YYYY-MM-DD"T"HH24:MI:SSOF') 15 | from foos 16 | join bars using(bar_id) 17 | join bazs using(baz_id) 18 | where created_at < :created_at_mark 19 | and foo_mark > :foo_mark 20 | order by created_at desc, source_name asc; 21 | """ 22 | groupdicts = [m.groupdict() for m in VAR_REF.finditer(sql)] 23 | assert len(groupdicts) == 3 24 | 25 | expected = [ 26 | { 27 | "dquote": None, 28 | "lead": None, 29 | "squote": "'YYYY-MM-DD\"T\"HH24:MI:SSOF'", 30 | "var_name": None, 31 | }, 32 | { 33 | "dquote": None, 34 | "lead": " ", 35 | "squote": None, 36 | "var_name": "created_at_mark", 37 | }, 38 | {"dquote": None, "lead": " ", "squote": None, "var_name": "foo_mark"}, 39 | ] 40 | assert groupdicts == expected 41 | 42 | 43 | def test_var_pattern_does_not_require_semicolon_trail(): 44 | """Make sure keywords ending queries are recognized even without 45 | semi-colons. 
46 | """ 47 | sql = r""" 48 | select a, 49 | b, 50 | c 51 | FROM foo 52 | WHERE a = :a""" 53 | 54 | groupdicts = [m.groupdict() for m in VAR_REF.finditer(sql)] 55 | assert len(groupdicts) == 1 56 | 57 | expected = {"dquote": None, "lead": " ", "squote": None, "var_name": "a"} 58 | assert groupdicts[0] == expected 59 | 60 | 61 | def test_var_pattern_handles_empty_sql_string_literals(): 62 | """Make sure SQL '' are treated correctly and don't cause a substitution to be skipped.""" 63 | sql = r""" 64 | select blah 65 | from foo 66 | where lower(regexp_replace(blah,'\\W','','g')) = lower(regexp_replace(:blah,'\\W','','g'));""" 67 | 68 | groupdicts = [m.groupdict() for m in VAR_REF.finditer(sql)] 69 | 70 | expected_single_quote_match = { 71 | "dquote": None, 72 | "lead": None, 73 | "squote": "''", 74 | "var_name": None, 75 | } 76 | assert groupdicts[1] == expected_single_quote_match 77 | 78 | expected_var_match = { 79 | "dquote": None, 80 | "lead": "(", 81 | "squote": None, 82 | "var_name": "blah", 83 | } 84 | assert groupdicts[3] == expected_var_match 85 | 86 | 87 | # must remove *only* OK comments 88 | COMMENTED = """ 89 | KO 90 | -- KO 91 | /* OK */ 92 | '/* KO */' 93 | "/* KO */" 94 | ' /* KO 95 | */' 96 | " /* KO 97 | */" 98 | /* 99 | * OK 100 | */ 101 | -- /* KO 102 | -- KO */ 103 | /* OK 104 | -- OK 105 | ' OK ' "OK " 106 | */ 107 | KO 108 | /* OK */ -- KO 'KO' 109 | -- KO */ 110 | /*+ KO (hints must be kept!) 
*/ 111 | """ 112 | 113 | 114 | def test_comments(): 115 | n = 0 116 | for ma in _UNCOMMENT.finditer(COMMENTED): 117 | matches = ma.groupdict() 118 | s, d, c, m = matches["squote"], matches["dquote"], matches["oneline"], matches["multiline"] 119 | # assert s or d or c or m, f"bad match: {m} {matches}" 120 | if s or d or c or m: 121 | n += 1 122 | if m: 123 | assert "OK" in m and "KO" not in m 124 | if s: 125 | assert "KO" in s and "OK" not in s 126 | if d: 127 | assert "KO" in d and "OK" not in d 128 | if c: 129 | assert "KO" in c and "OK" not in c 130 | assert n == 13 131 | 132 | 133 | COMMENT_UNCOMMENT = [ 134 | ("", ""), 135 | ("hello", "hello"), 136 | ("world!\n", "world!\n"), 137 | ("/**/", ""), 138 | ("/*+ hint */", "/*+ hint */"), 139 | ("x/*\n*/y\n", "xy\n"), 140 | ("-- /* */\n", "-- /* */\n"), 141 | ("-- /* */", "-- /* */"), 142 | ("'/* */'", "'/* */'"), 143 | ("--\n/* */X\n", "--\nX\n"), 144 | ] 145 | 146 | 147 | def test_uncomment(): 148 | n = 0 149 | for c, u in COMMENT_UNCOMMENT: 150 | n += 1 151 | assert _remove_ml_comments(c) == u 152 | assert n == len(COMMENT_UNCOMMENT) 153 | -------------------------------------------------------------------------------- /tests/test_pg8000.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import pg8000 as db 9 | except ModuleNotFoundError: 10 | pytest.skip("missing driver: pg8000", allow_module_level=True) 11 | 12 | pytestmark = [ 13 | pytest.mark.postgres, 14 | pytest.mark.skipif(not u.has_pkg("pytest_postgresql"), reason="no pytest_postgresql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "pg8000" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def rconn(pg_params, pg_conn): 27 | params = dict(pg_params) 28 | params["database"] = params["dbname"] 29 | del 
params["dbname"] 30 | # cleanup unsupported keyword arguments 31 | for kw in ("sslcertmode", "connect_timeout", "hostaddr"): 32 | if kw in params: 33 | del params[kw] 34 | u.log.debug(f"params: {params}") 35 | with db.connect(**params) as conn: 36 | yield conn 37 | 38 | @pytest.fixture 39 | def conn(pg_db): 40 | yield pg_db 41 | 42 | # TODO dconn: dict result is not supported 43 | 44 | from run_tests import ( 45 | run_sanity as test_sanity, 46 | run_something as test_something, 47 | run_cursor as test_cursor, 48 | # run_record_query as test_record_query, 49 | # run_parameterized_record_query as test_parameterized_record_query, 50 | run_parameterized_query as test_parameterized_query, 51 | run_record_class_query as test_record_class_query, 52 | run_select_cursor_context_manager as test_select_cursor_context_manager, 53 | run_select_one as test_select_one, 54 | run_insert_returning as test_insert_returning, 55 | run_delete as test_delete, 56 | run_insert_many as test_insert_many, 57 | run_select_value as test_select_value, 58 | run_date_time as test_date_time, 59 | # FIXME not supported? 
60 | # run_object_attributes as test_object_attributes, 61 | run_execute_script as test_execute_script, 62 | run_modulo as test_modulo, 63 | ) 64 | -------------------------------------------------------------------------------- /tests/test_psycopg2.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import utils as u 5 | 6 | try: 7 | import psycopg2 as db 8 | from psycopg2.extras import RealDictCursor as DictCursor 9 | except ModuleNotFoundError: 10 | pytest.skip("missing driver: psycopg2", allow_module_level=True) 11 | 12 | pytestmark = [ 13 | pytest.mark.postgres, 14 | pytest.mark.skipif(not u.has_pkg("pytest_postgresql"), reason="no pytest_postgresql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "psycopg2" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def rconn(pg_dsn): 27 | with db.connect(dsn=pg_dsn) as conn: 28 | yield conn 29 | 30 | @pytest.fixture 31 | def conn(pg_db): 32 | yield pg_db 33 | 34 | @pytest.fixture 35 | def dconn(pg_dsn, pg_db): 36 | with db.connect(dsn=pg_dsn, cursor_factory=DictCursor) as conn: 37 | yield conn 38 | 39 | from run_tests import ( 40 | run_sanity as test_sanity, 41 | run_something as test_something, 42 | run_cursor as test_cursor, 43 | run_record_query as test_record_query, 44 | run_parameterized_query as test_parameterized_query, 45 | run_parameterized_record_query as test_parameterized_record_query, 46 | run_record_class_query as test_record_class_query, 47 | run_select_cursor_context_manager as test_select_cursor_context_manager, 48 | run_select_one as test_select_one, 49 | run_insert_returning as test_insert_returning, 50 | run_delete as test_delete, 51 | run_insert_many as test_insert_many, 52 | run_select_value as test_select_value, 53 | run_date_time as test_date_time, 54 | run_object_attributes as test_object_attributes, 55 
| run_execute_script as test_execute_script, 56 | run_modulo as test_modulo, 57 | ) 58 | 59 | def test_version(): 60 | assert db.__version__.startswith("2.") 61 | -------------------------------------------------------------------------------- /tests/test_psycopg3.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import psycopg as db 9 | from psycopg.rows import dict_row 10 | except ModuleNotFoundError: 11 | pytest.skip("missing driver: psycopg", allow_module_level=True) 12 | 13 | pytestmark = [ 14 | pytest.mark.postgres, 15 | pytest.mark.skipif(not u.has_pkg("pytest_postgresql"), reason="no pytest_postgresql"), 16 | ] 17 | 18 | @pytest.fixture(scope="module") 19 | def driver(): 20 | return "psycopg" 21 | 22 | @pytest.fixture(scope="module") 23 | def date(): 24 | return datetime.date 25 | 26 | @pytest.fixture 27 | def rconn(pg_params): 28 | with db.connect(**pg_params) as conn: 29 | yield conn 30 | 31 | @pytest.fixture 32 | def conn(pg_db): 33 | yield pg_db 34 | 35 | @pytest.fixture 36 | def dconn(pg_params, pg_db): 37 | with db.connect(**pg_params, row_factory=dict_row) as conn: 38 | yield conn 39 | 40 | from run_tests import ( 41 | run_sanity as test_sanity, 42 | run_something as test_something, 43 | run_cursor as test_cursor, 44 | run_record_query as test_record_query, 45 | run_parameterized_record_query as test_parameterized_record_query, 46 | run_parameterized_query as test_parameterized_query, 47 | run_select_one as test_select_one, 48 | run_select_value as test_select_value, 49 | run_modulo as test_modulo, 50 | run_delete as test_delete, 51 | run_date_time as test_date_time, 52 | run_execute_script as test_execute_script, 53 | run_object_attributes as test_object_attributes, 54 | run_record_class_query as test_record_class_query, 55 | run_select_cursor_context_manager as 
test_select_cursor_context_manager, 56 | run_insert_returning as test_insert_returning, 57 | run_insert_many as test_insert_many, 58 | ) 59 | 60 | def test_version(): 61 | assert db.__version__.startswith("3.") 62 | 63 | def test_select_value_dict(dconn, queries): 64 | t.run_select_value(dconn, queries) 65 | -------------------------------------------------------------------------------- /tests/test_pygresql.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import utils as u 5 | 6 | try: 7 | import pgdb as db # PyGreSQL DB-API driver 8 | except ModuleNotFoundError: 9 | pytest.skip("missing driver: pygresql", allow_module_level=True) 10 | 11 | pytestmark = [ 12 | pytest.mark.postgres, 13 | pytest.mark.skipif(not u.has_pkg("pytest_postgresql"), reason="no pytest_postgresql"), 14 | ] 15 | 16 | @pytest.fixture(scope="module") 17 | def driver(): 18 | return "pygresql" 19 | 20 | @pytest.fixture(scope="module") 21 | def date(): 22 | return datetime.date 23 | 24 | @pytest.fixture 25 | def rconn(pg_params): 26 | params = dict(pg_params) 27 | params["database"] = params["dbname"] 28 | del params["dbname"] 29 | if "port" in params: 30 | params["host"] += ":" + params["port"] 31 | del params["port"] 32 | u.log.debug(f"params: {params}") 33 | with db.connect(**params) as conn: 34 | yield conn 35 | 36 | @pytest.fixture 37 | def conn(pg_db): 38 | yield pg_db 39 | 40 | # FIXME dconn 41 | 42 | from run_tests import ( 43 | run_sanity as test_sanity, 44 | run_something as test_something, 45 | run_cursor as test_cursor, 46 | # run_record_query as test_record_query, 47 | # run_parameterized_record_query as test_parameterized_record_query, 48 | run_parameterized_query as test_parameterized_query, 49 | run_record_class_query as test_record_class_query, 50 | run_select_cursor_context_manager as test_select_cursor_context_manager, 51 | run_select_one as test_select_one, 52 | run_insert_returning 
as test_insert_returning, 53 | run_delete as test_delete, 54 | run_insert_many as test_insert_many, 55 | run_select_value as test_select_value, 56 | run_date_time as test_date_time, 57 | run_object_attributes as test_object_attributes, 58 | run_execute_script as test_execute_script, 59 | run_modulo as test_modulo, 60 | ) 61 | -------------------------------------------------------------------------------- /tests/test_pymssql.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | 6 | try: 7 | import pymssql as db # Python MS SQL driver 8 | except ModuleNotFoundError: 9 | pytest.skip("missing driver: pymssql", allow_module_level=True) 10 | 11 | pytestmark = [ 12 | pytest.mark.mssql 13 | ] 14 | 15 | @pytest.fixture(scope="module") 16 | def driver(): 17 | return "pymssql" 18 | 19 | @pytest.fixture(scope="module") 20 | def date(): 21 | return datetime.date 22 | 23 | @pytest.fixture 24 | def conn(ms_db): 25 | yield ms_db 26 | 27 | @pytest.fixture 28 | def dconn(ms_db): 29 | yield ms_db 30 | 31 | def test_sanity_master(ms_master): 32 | with db.connect(**ms_master) as conn: 33 | t.run_sanity(conn) 34 | 35 | from run_tests import ( 36 | run_sanity as test_sanity, 37 | run_something as test_something, 38 | run_cursor as test_cursor, 39 | run_record_query as test_record_query, 40 | run_parameterized_query as test_parameterized_query, 41 | run_parameterized_record_query as test_parameterized_record_query, 42 | # FIXME broken with is_dict 43 | # run_record_class_query as test_record_class_query, 44 | run_select_cursor_context_manager as test_select_cursor_context_manager, 45 | run_select_one as test_select_one, 46 | run_insert_returning as test_insert_returning, 47 | run_delete as test_delete, 48 | # FIXME broken? 
49 | # run_insert_many as test_insert_many, 50 | run_select_value as test_select_value, 51 | run_date_time as test_date_time, 52 | run_object_attributes as test_object_attributes, 53 | run_execute_script as test_execute_script, 54 | run_modulo as test_modulo, 55 | ) 56 | -------------------------------------------------------------------------------- /tests/test_pymysql.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import aiosql 3 | import pytest 4 | import run_tests as t 5 | import utils as u 6 | 7 | try: 8 | import pymysql as db 9 | except ModuleNotFoundError: 10 | pytest.skip("missing driver: pymysql", allow_module_level=True) 11 | 12 | pytestmark = [ 13 | pytest.mark.mysql, 14 | pytest.mark.skipif(not u.has_pkg("pytest_mysql"), reason="no pytest_mysql"), 15 | ] 16 | 17 | @pytest.fixture(scope="module") 18 | def driver(): 19 | return "pymysql" 20 | 21 | @pytest.fixture(scope="module") 22 | def date(): 23 | return datetime.date 24 | 25 | @pytest.fixture 26 | def conn(my_db): 27 | return my_db 28 | 29 | @pytest.fixture 30 | def dconn(my_dsn): 31 | with db.connect(**my_dsn, cursorclass=db.cursors.DictCursor) as conn: 32 | yield conn 33 | 34 | def test_my_dsn(my_dsn): 35 | assert "user" in my_dsn and "host" in my_dsn and "port" in my_dsn 36 | 37 | def test_my_conn(conn): 38 | assert conn.__module__.startswith(db.__name__) 39 | t.run_something(conn) 40 | 41 | from run_tests import ( 42 | run_sanity as test_sanity, 43 | run_something as test_something, 44 | run_cursor as test_cursor, 45 | run_parameterized_query as test_parameterized_query, 46 | # FIXME Programming Error: Table 'test.users' doesn't exist 47 | # run_record_query as test_record_query, 48 | # FIXME ValueError: unsupported format character 'Y' (0x59) at index 55 49 | # run_parameterized_record_query as test_parameterized_record_query, 50 | run_record_class_query as test_record_class_query, 51 | run_select_cursor_context_manager as 
test_select_cursor_context_manager, 52 | run_select_one as test_select_one, 53 | # run_insert_returning as test_insert_returning, 54 | run_delete as test_delete, 55 | run_insert_many as test_insert_many, 56 | run_select_value as test_select_value, 57 | run_date_time as test_date_time, 58 | run_object_attributes as test_object_attributes, 59 | run_execute_script as test_execute_script, 60 | run_modulo as test_modulo, 61 | ) 62 | -------------------------------------------------------------------------------- /tests/test_sqlite3.py: -------------------------------------------------------------------------------- 1 | import aiosql 2 | import pytest 3 | import run_tests as t 4 | import sqlite3 as db 5 | import utils 6 | 7 | pytestmark = [ 8 | pytest.mark.sqlite3 9 | ] 10 | 11 | @pytest.fixture(scope="module") 12 | def driver(): 13 | return "sqlite3" 14 | 15 | @pytest.fixture(scope="module") 16 | def date(): 17 | return t.todate 18 | 19 | @pytest.fixture 20 | def rconn(li_dbpath): 21 | conn = db.connect(li_dbpath) 22 | yield conn 23 | conn.close() 24 | 25 | @pytest.fixture 26 | def conn(li_db): 27 | return li_db 28 | 29 | @pytest.fixture 30 | def dconn(conn): 31 | conn.row_factory = utils.dict_factory 32 | return conn 33 | 34 | from run_tests import ( 35 | run_sanity as test_sanity, 36 | run_something as test_something, 37 | run_cursor as test_cursor, 38 | run_record_query as test_record_query, 39 | run_parameterized_query as test_parameterized_query, 40 | run_parameterized_record_query as test_parameterized_record_query, 41 | run_record_class_query as test_record_class_query, 42 | run_select_cursor_context_manager as test_select_cursor_context_manager, 43 | run_select_one as test_select_one, 44 | run_insert_returning as test_insert_returning, 45 | run_delete as test_delete, 46 | run_insert_many as test_insert_many, 47 | run_select_value as test_select_value, 48 | run_date_time as test_date_time, 49 | run_object_attributes as test_object_attributes, 50 | 
run_execute_script as test_execute_script, 51 | run_modulo as test_modulo, 52 | ) 53 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | import importlib 3 | import logging 4 | import time 5 | import contextlib 6 | import asyncio 7 | 8 | log = logging.getLogger("pytest-aiosql") 9 | logging.basicConfig(level=logging.INFO) 10 | 11 | def has_cmd(cmd): 12 | return shutil.which(cmd) is not None 13 | 14 | def has_pkg(pkg): 15 | """Tell whether a module is available.""" 16 | try: 17 | importlib.import_module(pkg) 18 | return True 19 | except ModuleNotFoundError: 20 | return False 21 | 22 | def has_service(host="localhost", port=22, retry=1): 23 | """Tell whether a service (host port) is available.""" 24 | import socket 25 | 26 | while retry > 0: 27 | retry -= 1 28 | try: 29 | tcp_ip = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 30 | tcp_ip.settimeout(1) 31 | res = tcp_ip.connect_ex((host, port)) 32 | if res == 0: 33 | return True 34 | if retry > 0: 35 | time.sleep(3) 36 | except Exception as e: 37 | log.info(f"connection to {(host, port)} failed: {e}") 38 | if retry > 0: 39 | time.sleep(3) 40 | finally: 41 | tcp_ip.close() 42 | return False 43 | 44 | @contextlib.contextmanager 45 | def db_connect(db, tries, *args, **kwargs): 46 | """Return an auto-closing database connection, possibly with several attempts.""" 47 | fails, done = 0, False 48 | while not done and fails < tries: 49 | try: 50 | with db.connect(*args, **kwargs) as conn: 51 | done = True 52 | yield conn 53 | except Exception as e: 54 | fails += 1 55 | log.warning(f"{db.__name__} connection failed ({fails}): {e}") 56 | time.sleep(1.0) 57 | if not done: 58 | log.error(f"failed to connect after {tries} attempts") 59 | 60 | def dict_factory(cursor, row): 61 | d = {} 62 | for idx, col in enumerate(cursor.description): 63 | d[col[0]] = row[idx] 64 | return d 65 | 
66 | def run_async(awaitable): 67 | loop = asyncio.get_event_loop() 68 | return loop.run_until_complete(awaitable) 69 | -------------------------------------------------------------------------------- /tests/wait.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | import sys 4 | import utils as u 5 | 6 | assert len(sys.argv) == 4, f"usage: {sys.argv[0]} host port tries" 7 | 8 | host, port, tries = sys.argv[1], int(sys.argv[2]), int(sys.argv[3]) 9 | 10 | sys.exit(0 if u.has_service(host, port, tries) else 1) 11 | --------------------------------------------------------------------------------